/// <summary>
/// Loads the MNIST training set, pre-trains a sparse auto-encoder on the raw
/// pixels, seeds a classifier network with the encoder's hidden layer, then
/// fine-tunes the classifier on the labels and saves both networks to disk.
/// Any failure is reported to the user via a message box.
/// </summary>
private void btnLoadMNIST_Click(object sender, EventArgs e)
{
    try
    {
        /*
         * Use the DataPoint[]
         * Get double[][] from DataPoint[]
         * array of array, should be easily converted to double[][]
         */
        //String trainDir = "..\\..\\..\\..\\..\\..\\handouts\\data\\trainingAll60000";
        String trainDir = "..\\..\\..\\..\\..\\..\\handouts\\data\\train";
        //String testDir = "..\\..\\..\\..\\..\\..\\handouts\\data\\testAll10000";

        Stopwatch sw = new Stopwatch();
        sw.Start();
        DataPoint[] data = ImageReader.ReadAllDataScaled(trainDir);
        double[][] trainData = ImageReader.GetData(data);
        sw.Stop();
        MessageBox.Show("Time taken to read the trainer data " + sw.ElapsedMilliseconds.ToString());

        // Feature count per image. Use the array's Length property rather than
        // LINQ Count() (CA1829) — same value, no enumerator allocation.
        int inputSize = trainData[0].Length;

        // Stage 1: sparse auto-encoder — the output layer is as wide as the
        // input so the network learns to reconstruct its own input.
        int[] layers = { 100, inputSize }; // neurons in hidden layer, output layer
        sparse_encoder = new Network(inputSize, layers); // # of inputs
        sparse_encoder.randomizeAll();
        sparse_encoder.LearningAlg.ErrorTreshold = 0.0001f;
        sparse_encoder.LearningAlg.MaxIteration = 10000;
        //sparse_encoder = Network.load("sparse_encoder");

        sw.Restart();
        // Auto-encoder training: targets are the inputs themselves.
        sparse_encoder.LearningAlg.Learn(trainData, trainData);
        sw.Stop();
        MessageBox.Show("Done training...Time taken " + sw.ElapsedMilliseconds.ToString());

        /* Save the trained auto-encoder */
        sparse_encoder.save("sparse_encoder");

        // Stage 2: classifier — hidden layer seeded from the encoder
        // (pre-training), output layer sized to the label vector.
        double[][] expectedOutputs = ImageReader.ExpectedOutput(data);
        int[] nnLayers = { 100, expectedOutputs[0].Length }; // neurons in hidden layer, output layer
        nn = new Network(inputSize, nnLayers); // # of inputs

        /* No need to randomize: take weights and biases from sparse_encoder.
         * NOTE(review): this copies the weight ARRAY REFERENCES, so fine-tuning
         * nn below also mutates sparse_encoder's in-memory weights. The encoder
         * was already saved to disk above, so the saved model is unaffected —
         * clone the arrays here if the live encoder object must stay intact. */
        for (int i = 0; i < sparse_encoder.layers[0].NumNeurons; i++)
        {
            nn.layers[0].Neurons[i].weights = sparse_encoder.layers[0].Neurons[i].weights;
            nn.layers[0].Neurons[i].Bias = sparse_encoder.layers[0].Neurons[i].Bias;
        }
        //nn.randomizeAll();
        nn.LearningAlg.ErrorTreshold = 0.0001f;
        nn.LearningAlg.MaxIteration = 10000;

        sw.Restart();
        nn.LearningAlg.Learn(trainData, expectedOutputs);
        sw.Stop();
        MessageBox.Show("Done training...Time taken " + sw.ElapsedMilliseconds.ToString());
        nn.save("nn_ae");
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
    }
}
/// <summary>
/// Loads the training images, runs PCA (mean-centering, covariance,
/// eigendecomposition, projection onto the top 50 components), previews the
/// eigenfaces in the group box's picture boxes, then trains a classifier
/// network on the projected features and saves it. Errors are shown to the
/// user via a message box.
/// </summary>
private void btnLoadPCA_Click(object sender, EventArgs e)
{
    try
    {
        /*
         * STEPS
         * 1- Convert image to grayscale
         * 2- Convert to 2-D image i.e. conversion to vector
         */
        //String trainDir = "..\\..\\..\\..\\..\\..\\handouts\\data\\trainingAll60000";
        String trainDir = "..\\..\\..\\..\\..\\..\\handouts\\data\\train";
        //String testDir = "..\\..\\..\\..\\..\\..\\handouts\\data\\testAll10000";
        //string trainDir = "..\\..\\..\\..\\..\\..\\handouts\\AttDataSet\\ATTDataSet\\Training";

        Stopwatch sw = new Stopwatch();
        sw.Start();
        DataPoint[] data = ImageReader.ReadAllDataUnscaled(trainDir);
        //double[][] trainData = ImageReader.ReadAllData(trainDir);
        double[][] trainDataOrig = ImageReader.GetData(data);
        // Transposed so the PCA routines below see the expected orientation.
        double[][] trainData = PCA.Transpose(trainDataOrig, trainDataOrig[0].Length);
        sw.Stop();
        MessageBox.Show("Time taken to read the trainer data " + sw.ElapsedMilliseconds.ToString());

        /*
         * STEPS:
         * 3- Compute the mean vector of all test images
         * 4- Subtract mean vector from each image
         * 5- Compute covariance matrix of all test images
         *    pass the vector through the PCA
         *    then pass that data through the NN
         */
        sw.Restart();
        iMean = PCA.FindMean(trainData);
        PCA.SubMean(trainData, iMean); // mean-centers trainData in place
        double[][] covariance = PCA.Covariance(trainData);

        /* Compute the eigenvalues (values are sorted) */
        PCALib.Matrix mapackMatrix = new PCALib.Matrix(covariance);
        PCALib.IEigenvalueDecomposition EigenVal = mapackMatrix.GetEigenvalueDecomposition();

        /* select the top 50 eigenvalues */
        int top = 50;
#if DEBUG
        /*
         * We don't need the eigenvalues themselves,
         * because the eigenvectors are already
         * calculated by the Mapack library — this is a debug-only sanity check.
         */
        double[] topEigen = new double[top];
        PCA.GetTopN(EigenVal.RealEigenvalues, topEigen, top);
#endif // DEBUG

        /* get eigenvectors */
        double[][] EigenVector = PCA.GetEigenVector(EigenVal.EigenvectorMatrix, top);

        /* multiply the eigenvectors with the mean-subtracted data */
        EigenFaceImage = PCA.Multiply(trainData, EigenVector);

        /* Project each image onto the reduced top-dimensional space */
        double[][] transposeInput = PCA.Transpose(EigenFaceImage, EigenFaceImage[0].Length);
        double[][] transposeTrainData = PCA.Transpose(trainData, trainData[0].Length);
        projectionInput = PCA.Multiply(transposeTrainData, EigenFaceImage);
        sw.Stop();
        MessageBox.Show("Done PCA...Time taken " + sw.ElapsedMilliseconds.ToString());
        // 256094 normal vs 211514 parallel

        // Preview the eigenfaces in every PictureBox inside the group box.
        double[][] image = PCA.ConvertToPixels(transposeInput);
        int iNo = 0;
        foreach (Control obj in groupbox1.Controls)
        {
            // Pattern match instead of a bare 'is' type test (idiomatic C# 7+).
            if (obj is PictureBox pictureBox)
            {
                pictureBox.BackgroundImage = PCA.Draw(image, iNo++, trainDir);
            }
        }

        // Train the classifier on the PCA-projected features.
        double[][] expectedOutputs = ImageReader.ExpectedOutput(data);
        int[] layers = { 50, 10 }; // neurons in hidden layer, output layer
        nn = new Network(projectionInput[0].Length, layers); // # of inputs
        nn.randomizeAll();
        nn.LearningAlg.ErrorTreshold = 0.0001f;
        nn.LearningAlg.MaxIteration = 10000;

        sw.Restart();
        nn.LearningAlg.Learn(projectionInput, expectedOutputs);
        sw.Stop();
        MessageBox.Show("Done training...Time taken " + sw.ElapsedMilliseconds.ToString());
        nn.save("nn_pca");
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
    }
}