// Builds the PCA model: stores the raw training data, computes the
// per-column mean vector and the projection matrix W, then projects every
// mean-centred training sample into the reduced subspace.
//
// trainingSet      - one row per raw training sample.
// labels           - class label for each sample (parallel to trainingSet).
// numOfComponents  - number of principal components kept in W.
public PCA(double[][] trainingSet, List<string> labels, int numOfComponents)
{
    this.trainingSet = trainingSet;
    this.labels = labels;
    this.numOfComponents = numOfComponents;

    // Column-wise mean of the training data (dimension 0 = across rows);
    // subtracted from every sample before projection.
    this.meanMatrix = Accord.Statistics.Tools.Mean(this.trainingSet, 0);
    this.W = getFeature(this.trainingSet, this.numOfComponents);

    // Project each centred sample onto W's subspace and keep it together
    // with its class label.
    this.projectedTrainingSet = new List<projectedTrainingMatrix>();
    for (int sample = 0; sample < trainingSet.Count(); sample++)
    {
        var centred = trainingSet[sample].Subtract(meanMatrix).ToArray();
        var projected = new projectedTrainingMatrix(this.W.Transpose().Multiply(centred), labels[sample]);
        this.projectedTrainingSet.Add(projected);
    }
}
// Builds a PCA model from the raw training samples.
//
// trainingSet      - one row per raw training sample (feature vectors).
// labels           - class label for each row of trainingSet (parallel list).
// numOfComponents  - number of principal components to keep in W.
public PCA(double[][] trainingSet, List<string> labels, int numOfComponents)
{
    this.trainingSet = trainingSet;
    this.labels = labels;
    this.numOfComponents = numOfComponents;
    // Per-column mean of the training data; subtracted before every projection.
    this.meanMatrix = Accord.Statistics.Tools.Mean(this.trainingSet, 0);
    // Projection matrix — presumably the top eigenvectors of the covariance;
    // computed by getFeature elsewhere in this class.
    this.W = getFeature(this.trainingSet, this.numOfComponents);
    // Construct projectedTrainingMatrix
    this.projectedTrainingSet = new List<projectedTrainingMatrix>();
    for (int i = 0; i < trainingSet.Count(); i++)
    {
        // Centre the sample on the mean, then project it into W's subspace.
        projectedTrainingMatrix ptm = new projectedTrainingMatrix(this.W.Transpose().Multiply(trainingSet[i].Subtract(meanMatrix).ToArray()), labels[i]);
        this.projectedTrainingSet.Add(ptm);
    }
}
// testFace has been projected to the subspace already.
// Returns the K training samples nearest to testFace under the given
// metric, using a "replace the current worst candidate" scan.
// Side effect: writes the computed distance into the `distance` field of
// every trainingSet entry it visits.
static projectedTrainingMatrix[] findKNN(projectedTrainingMatrix[] trainingSet, double[][] testFace, int K, EuclideanDistance metric)
{
    int total = trainingSet.Length;

    // Seed the candidate set with the first K samples.
    projectedTrainingMatrix[] nearest = new projectedTrainingMatrix[K];
    for (int seed = 0; seed < K; seed++)
    {
        trainingSet[seed].distance = metric.getDistance(trainingSet[seed].matrix, testFace);
        nearest[seed] = trainingSet[seed];
    }

    // Scan the remaining samples; whenever one beats the current worst
    // candidate, it takes that candidate's slot.
    for (int idx = K; idx < total; idx++)
    {
        trainingSet[idx].distance = metric.getDistance(trainingSet[idx].matrix, testFace);

        // Locate the farthest of the current K candidates (first one wins ties).
        int worst = 0;
        for (int j = 1; j < K; j++)
        {
            if (nearest[j].distance > nearest[worst].distance)
            {
                worst = j;
            }
        }

        if (nearest[worst].distance > trainingSet[idx].distance)
        {
            nearest[worst] = trainingSet[idx];
        }
    }

    return nearest;
}
// get the class label by using neighbors.
// Inverse-distance-weighted vote: each neighbour contributes 1/distance to
// its label's score and the label with the highest total wins. A zero
// distance yields an infinite weight (1/0.0 == +Infinity in IEEE 754), so
// an exact match dominates the vote — same behaviour as the original.
// Returns "" when no label scores above zero (e.g. empty neighbour array).
static String classify(projectedTrainingMatrix[] neighbors)
{
    Dictionary<String, Double> map = new Dictionary<String, Double>();

    // Accumulate each neighbour's weight under its label.
    // TryGetValue replaces the original ContainsKey + indexer pattern,
    // which performed two hash lookups per neighbour.
    foreach (projectedTrainingMatrix temp in neighbors)
    {
        double value;
        map.TryGetValue(temp.label, out value); // value == 0 when absent
        map[temp.label] = value + 1 / temp.distance;
    }

    // Find the most likely label (first strictly-greater score wins,
    // preserving the original tie-breaking behaviour).
    double maxSimilarity = 0;
    String returnLabel = "";
    foreach (KeyValuePair<String, Double> entry in map)
    {
        if (entry.Value > maxSimilarity)
        {
            maxSimilarity = entry.Value;
            returnLabel = entry.Key;
        }
    }
    return returnLabel;
}
// Builds a Fisher LDA model on top of PCA (the classic "Fisherfaces"
// pipeline): PCA first reduces to n - c dimensions so that the
// within-class scatter Sw is invertible, then LDA keeps the directions
// maximising between-class over within-class scatter.
//
// trainingSet      - one row per raw training sample.
// labels           - class label per sample (parallel to trainingSet).
// numOfComponents  - requested component count (the PCA stage uses n - c).
public LDA(double[][] trainingSet, List<string> labels, int numOfComponents)
{
    int n = trainingSet.Length; // sample size
    HashSet<String> tempSet = new HashSet<String>(labels);
    int c = tempSet.Count; // class size

    // Stage 1: PCA down to n - c dimensions.
    PCA pca = new PCA(trainingSet, labels, n - c);

    // meanTotal starts as an (n - c) x 1 zero column vector and first
    // accumulates the SUM of all projected samples.
    double[][] meanTotal = new double[n - c][];
    for (int i = 0; i < n - c; i++)
    {
        meanTotal[i] = new double[1];
    }
    Dictionary<String, List<double[]>> dict = new Dictionary<String, List<double[]>>();
    List<projectedTrainingMatrix> pcaTrain = pca.getProjectedTrainingSet();
    for (int i = 0; i < pcaTrain.Count; i++)
    {
        String key = pcaTrain[i].label;
        meanTotal = meanTotal.Add(pcaTrain[i].matrix);
        // Group the projected samples (as row vectors) by class label.
        // TryGetValue avoids the original ContainsKey + indexer double lookup.
        List<double[]> samplesOfClass;
        if (!dict.TryGetValue(key, out samplesOfClass))
        {
            samplesOfClass = new List<double[]>();
            dict.Add(key, samplesOfClass);
        }
        samplesOfClass.Add(pcaTrain[i].matrix.Transpose()[0]);
    }
    // BUG FIX: the original called Multiply() and discarded its result —
    // Accord's scalar Multiply returns a new matrix rather than mutating —
    // so meanTotal stayed the sum instead of the mean, corrupting Sb below.
    // Assign the scaled result back.
    meanTotal = meanTotal.ToMatrix().Multiply((double)1 / n).ToArray();

    // Stage 2: within-class scatter Sw and between-class scatter Sb, both
    // (n - c) x (n - c), initialised to zero.
    double[][] Sw = new double[n - c][];
    double[][] Sb = new double[n - c][];
    for (int i = 0; i < n - c; i++)
    {
        Sw[i] = new double[n - c];
        Sb[i] = new double[n - c];
    }
    List<String> labelSet = dict.Keys.ToList();
    foreach (string label in labelSet)
    {
        List<double[]> tempMatrix = dict[label];
        double[][] matrixWithinThatClass = tempMatrix.ToArray();
        double[] meanOfCurrentClass = Accord.Statistics.Tools.Mean(matrixWithinThatClass);
        // Sw += (x - mean_class)(x - mean_class)^T for every sample x.
        for (int i = 0; i < matrixWithinThatClass.Length; i++)
        {
            double[][] temp1 = matrixWithinThatClass[i].ToArray().Subtract(meanOfCurrentClass.ToArray());
            temp1 = temp1.Multiply(temp1.Transpose());
            Sw = Sw.Add(temp1);
        }
        // Sb += classSize * (mean_class - mean_total)(mean_class - mean_total)^T.
        double[][] temp = meanOfCurrentClass.ToArray().Subtract(meanTotal);
        temp = temp.Multiply(temp.Transpose()).ToMatrix().Multiply((double)matrixWithinThatClass.Length).ToArray();
        Sb = Sb.Add(temp);
    }

    // calculate the eigenvalues and vectors of Sw^-1 * Sb, keeping the
    // eigenvectors belonging to the largest eigenvalues.
    double[][] targetForEigen = Sw.Inverse().Multiply(Sb);
    var feature = new EigenvalueDecomposition(targetForEigen.ToMatrix());
    double[] d = feature.RealEigenvalues;
    int[] indexes;
    d.StableSort(out indexes);             // sorted order of eigenvalues
    indexes = indexes.Reverse().ToArray(); // largest first
    // NOTE(review): Submatrix(0, c - 1) with inclusive bounds takes c
    // entries, yet LDA has at most c - 1 discriminant directions (see the
    // commented-out getIndexesOfKEigenvalues(d, c - 1) call) — confirm the
    // intended count before changing W's width, as callers may depend on it.
    indexes = indexes.Submatrix(0, c - 1);
    //int[] indexes = getIndexesOfKEigenvalues(d, c - 1);
    double[][] eigenVectors = feature.Eigenvectors.ToArray();
    double[][] selectedEigenVectors = eigenVectors.Submatrix(0, eigenVectors.Length - 1, indexes);
    // Final projection chains the PCA and LDA transforms.
    this.W = pca.getW().Multiply(selectedEigenVectors);

    // Construct projectedTrainingMatrix: push every raw sample through the
    // combined transform (centred on the PCA mean).
    this.projectedTrainingSet = new List<projectedTrainingMatrix>();
    for (int i = 0; i < trainingSet.Length; i++)
    {
        projectedTrainingMatrix ptm = new projectedTrainingMatrix(this.W.Transpose().Multiply(trainingSet[i].Subtract(pca.meanMatrix).ToArray()), labels[i]);
        this.projectedTrainingSet.Add(ptm);
    }
    this.meanMatrix = pca.meanMatrix;
    // Removed the original GC.Collect(): forcing a collection in library
    // code is an anti-pattern and only hurts throughput.
}
// Builds a Fisher LDA model on top of PCA (the classic "Fisherfaces"
// pipeline): PCA first reduces to n - c dimensions so that the
// within-class scatter Sw is invertible, then LDA keeps the directions
// maximising between-class over within-class scatter.
//
// trainingSet      - one row per raw training sample.
// labels           - class label per sample (parallel to trainingSet).
// numOfComponents  - requested component count (the PCA stage uses n - c).
public LDA(double[][] trainingSet, List<string> labels, int numOfComponents)
{
    int n = trainingSet.Length; // sample size
    HashSet<String> tempSet = new HashSet<String>(labels);
    int c = tempSet.Count; // class size

    // Stage 1: PCA down to n - c dimensions.
    PCA pca = new PCA(trainingSet, labels, n - c);

    // meanTotal starts as an (n - c) x 1 zero column vector and first
    // accumulates the SUM of all projected samples.
    double[][] meanTotal = new double[n - c][];
    for (int i = 0; i < n - c; i++)
    {
        meanTotal[i] = new double[1];
    }
    Dictionary<String, List<double[]>> dict = new Dictionary<String, List<double[]>>();
    List<projectedTrainingMatrix> pcaTrain = pca.getProjectedTrainingSet();
    for (int i = 0; i < pcaTrain.Count; i++)
    {
        String key = pcaTrain[i].label;
        meanTotal = meanTotal.Add(pcaTrain[i].matrix);
        // Group the projected samples (as row vectors) by class label.
        // TryGetValue avoids the original ContainsKey + indexer double lookup.
        List<double[]> samplesOfClass;
        if (!dict.TryGetValue(key, out samplesOfClass))
        {
            samplesOfClass = new List<double[]>();
            dict.Add(key, samplesOfClass);
        }
        samplesOfClass.Add(pcaTrain[i].matrix.Transpose()[0]);
    }
    // BUG FIX: the original called Multiply() and discarded its result —
    // Accord's scalar Multiply returns a new matrix rather than mutating —
    // so meanTotal stayed the sum instead of the mean, corrupting Sb below.
    // Assign the scaled result back.
    meanTotal = meanTotal.ToMatrix().Multiply((double)1 / n).ToArray();

    // Stage 2: within-class scatter Sw and between-class scatter Sb, both
    // (n - c) x (n - c), initialised to zero.
    double[][] Sw = new double[n - c][];
    double[][] Sb = new double[n - c][];
    for (int i = 0; i < n - c; i++)
    {
        Sw[i] = new double[n - c];
        Sb[i] = new double[n - c];
    }
    List<String> labelSet = dict.Keys.ToList();
    foreach (string label in labelSet)
    {
        List<double[]> tempMatrix = dict[label];
        double[][] matrixWithinThatClass = tempMatrix.ToArray();
        double[] meanOfCurrentClass = Accord.Statistics.Tools.Mean(matrixWithinThatClass);
        // Sw += (x - mean_class)(x - mean_class)^T for every sample x.
        for (int i = 0; i < matrixWithinThatClass.Length; i++)
        {
            double[][] temp1 = matrixWithinThatClass[i].ToArray().Subtract(meanOfCurrentClass.ToArray());
            temp1 = temp1.Multiply(temp1.Transpose());
            Sw = Sw.Add(temp1);
        }
        // Sb += classSize * (mean_class - mean_total)(mean_class - mean_total)^T.
        double[][] temp = meanOfCurrentClass.ToArray().Subtract(meanTotal);
        temp = temp.Multiply(temp.Transpose()).ToMatrix().Multiply((double)matrixWithinThatClass.Length).ToArray();
        Sb = Sb.Add(temp);
    }

    // calculate the eigenvalues and vectors of Sw^-1 * Sb, keeping the
    // eigenvectors belonging to the largest eigenvalues.
    double[][] targetForEigen = Sw.Inverse().Multiply(Sb);
    var feature = new EigenvalueDecomposition(targetForEigen.ToMatrix());
    double[] d = feature.RealEigenvalues;
    int[] indexes;
    d.StableSort(out indexes);             // sorted order of eigenvalues
    indexes = indexes.Reverse().ToArray(); // largest first
    // NOTE(review): Submatrix(0, c - 1) with inclusive bounds takes c
    // entries, yet LDA has at most c - 1 discriminant directions (see the
    // commented-out getIndexesOfKEigenvalues(d, c - 1) call) — confirm the
    // intended count before changing W's width, as callers may depend on it.
    indexes = indexes.Submatrix(0, c - 1);
    //int[] indexes = getIndexesOfKEigenvalues(d, c - 1);
    double[][] eigenVectors = feature.Eigenvectors.ToArray();
    double[][] selectedEigenVectors = eigenVectors.Submatrix(0, eigenVectors.Length - 1, indexes);
    // Final projection chains the PCA and LDA transforms.
    this.W = pca.getW().Multiply(selectedEigenVectors);

    // Construct projectedTrainingMatrix: push every raw sample through the
    // combined transform (centred on the PCA mean).
    this.projectedTrainingSet = new List<projectedTrainingMatrix>();
    for (int i = 0; i < trainingSet.Length; i++)
    {
        projectedTrainingMatrix ptm = new projectedTrainingMatrix(this.W.Transpose().Multiply(trainingSet[i].Subtract(pca.meanMatrix).ToArray()), labels[i]);
        this.projectedTrainingSet.Add(ptm);
    }
    this.meanMatrix = pca.meanMatrix;
    // Removed the original GC.Collect(): forcing a collection in library
    // code is an anti-pattern and only hurts throughput.
}