/*
         * [STAThread]
         * static void Main(string[] args)
         * {
         *  using (OpenFileDialog dlg = new OpenFileDialog())
         *  {
         *      dlg.Title = "Open Image";
         *      dlg.Filter = "jpg files (*.jpg)|*.jpg";
         *
         *      if (dlg.ShowDialog() == DialogResult.OK)
         *      {
         *          // Create a new Bitmap object from the picture file on disk
         *          var bitmap = new Bitmap(dlg.FileName);
         *
         *          Preprocessing.Preprocessing preprocessing = new Preprocessing.Preprocessing(bitmap, 10, 20);
         *          string directoryGrayscale = Path.GetDirectoryName(dlg.FileName) + "\\Grayscale.bmp";
         *          preprocessing.GrayscalingImage.Save(directoryGrayscale, ImageFormat.Bmp);
         *
         *          string directoryFilteringImage = Path.GetDirectoryName(dlg.FileName) + "\\Filtering.bmp";
         *          preprocessing.FilterImage.Save(directoryFilteringImage, ImageFormat.Bmp);
         *
         *          string directoryThresholdBinary = Path.GetDirectoryName(dlg.FileName) + "\\AverageBinary.bmp";
         *          preprocessing.AverageBinaryImage.Save(directoryThresholdBinary, ImageFormat.Bmp);
         *
         *          Console.WriteLine(directoryGrayscale);
         *          Console.WriteLine(directoryFilteringImage);
         *          Console.WriteLine(directoryThresholdBinary);
         *
         *          Console.WriteLine("ProfileProjection");
         *          ProfileProjection.ProfileProjection profileProjection = new ProfileProjection.ProfileProjection(preprocessing.Output,10,25);
         *          var resultImageHorizontally = profileProjection.ResultImageAfterSliceHorizontally;
         *          int i = 0;
         *          foreach(Bitmap result in resultImageHorizontally)
         *          {
         *              string directorySlicing = Path.GetDirectoryName(dlg.FileName) + "\\slicing-" + i + ".bmp";
         *              Console.WriteLine(directorySlicing);
         *              result.Save(directorySlicing, ImageFormat.Bmp);
         *              i++;
         *          }
         *          //ProfileProjection profileProjection = new ProfileProjection(preprocessing.Output);
         *          //var profileProjectionFeature =  profileProjection.ProjectionFeature;
         *
         *          /*
         *          for(int i = 0; i < profileProjectionFeature.Length; i++)
         *          {
         *              Console.WriteLine(profileProjectionFeature[i]);
         *          }
         *
         *          Console.WriteLine("==================================");
         *          MomentsHu momentsHu = new MomentsHu(preprocessing.Output);
         *          var moments = momentsHu.Moments;
         *          Console.WriteLine("Moments");
         *          for(int i = 0; i < moments.Length; i++)
         *          {
         *              Console.WriteLine("Moments " + i + " : " + moments[i]);
         *          }
         *
         *
         *
         *          Console.ReadKey();
         *      }
         *  }
         * }*/
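        // Demo entry point: generate artificial data with a randomly weighted network,
        // split it 80/20 into train and test sets, train a back-propagation network,
        // and report the classification accuracy on both sets.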

        static void Main(string[] args)
        {
            Console.WriteLine("\nBegin neural network back-propagation demo");

            int numInput  = 100; // number of features
            int numHidden = 500;
            int numOutput = 36;  // number of classes for Y
            int numRows   = 100;
            int seed      = 1;   // gives nice demo

            Console.WriteLine("\nGenerating " + numRows +
                              " artificial data items with " + numInput + " features");
            double[][] allData = MakeAllData(numInput, numHidden, numOutput,
                                             numRows, seed);
            Console.WriteLine("Done");

            //ShowMatrix(allData, allData.Length, 2, true);

            Console.WriteLine("\nCreating train (80%) and test (20%) matrices");
            double[][] trainData;
            double[][] testData;
            SplitTrainTest(allData, 0.80, seed, out trainData, out testData);
            Console.WriteLine("Done\n");

            Console.WriteLine("Training data:");
            ShowMatrix(trainData, 4, 2, true);
            Console.WriteLine("Test data:");
            ShowMatrix(testData, 4, 2, true);

            Console.WriteLine("Creating a " + numInput + "-" + numHidden +
                              "-" + numOutput + " neural network");
            BackpropagationNeuralNetwork.BackpropagationNeuralNetwork nn =
                new BackpropagationNeuralNetwork.BackpropagationNeuralNetwork(numInput, numHidden, numOutput, true);

            int    maxEpochs = 100;
            double learnRate = 0.05;
            double momentum  = 0.01;

            Console.WriteLine("\nSetting maxEpochs = " + maxEpochs);
            Console.WriteLine("Setting learnRate = " + learnRate.ToString("F2"));
            Console.WriteLine("Setting momentum  = " + momentum.ToString("F2"));

            Console.WriteLine("\nStarting training");
            Layers weights = nn.Train(trainData, maxEpochs, learnRate, momentum);

            Console.WriteLine("Done");
            Console.WriteLine("\nFinal neural network model weights and biases:\n");
            ShowVector(weights, 2, 10, true);

            //double[] y = nn.ComputeOutputs(new double[] { 1.0, 2.0, 3.0, 4.0 });
            //ShowVector(y, 3, 3, true);

            double trainAcc = nn.Accuracy(trainData);

            Console.WriteLine("\nFinal accuracy on training data = " +
                              trainAcc.ToString("F4"));

            double testAcc = nn.Accuracy(testData);

            Console.WriteLine("Final accuracy on test data     = " +
                              testAcc.ToString("F4"));

            Console.WriteLine("\nEnd back-propagation demo\n");
            Console.ReadLine();
        } // Main
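        // NOTE: SplitTrainTest, ShowMatrix and ShowVector are helpers that are not
        // shown in this listing. The commented-out sketch below is only an assumption
        // of what SplitTrainTest might look like, inferred from its call site above:
        // shuffle the row order with the given seed, then hand the first trainPct of
        // rows to trainData and the remainder to testData. It is left commented out
        // so it does not clash with the real helper defined elsewhere.
        //
        // static void SplitTrainTest(double[][] allData, double trainPct, int seed,
        //                            out double[][] trainData, out double[][] testData)
        // {
        //     Random rnd = new Random(seed);
        //     int totRows = allData.Length;
        //     int numTrainRows = (int)(totRows * trainPct);
        //     int numTestRows = totRows - numTrainRows;
        //
        //     int[] sequence = new int[totRows];           // row indices to shuffle
        //     for (int i = 0; i < sequence.Length; ++i)
        //         sequence[i] = i;
        //     for (int i = 0; i < sequence.Length; ++i)    // Fisher-Yates shuffle
        //     {
        //         int r = rnd.Next(i, sequence.Length);
        //         int tmp = sequence[r]; sequence[r] = sequence[i]; sequence[i] = tmp;
        //     }
        //
        //     trainData = new double[numTrainRows][];
        //     testData  = new double[numTestRows][];
        //     for (int i = 0; i < numTrainRows; ++i)       // copy row references, not data
        //         trainData[i] = allData[sequence[i]];
        //     for (int i = 0; i < numTestRows; ++i)
        //         testData[i] = allData[sequence[numTrainRows + i]];
        // }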
Example #2
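        // Builds the training set from the Hu-moment features of each ImageDTO
        // (the target bits come from ArrayBinaryofClass), trains the
        // back-propagation network, and optionally displays the resulting weights
        // in dgViewWeightResult. The grid is updated through Invoke because this
        // method is expected to run off the UI thread.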
        private void backgroundBPNNTraining(List <ImageDTO> data)
        {
            int inputNeuron  = data[0].MomentHu.Length;
            int hiddenNeuron = Convert.ToInt32(numHiddenNeuron.Value);
            int layerNeuron  = Convert.ToInt32(numLayers.Value);
            int outputNeuron = Constants.LENGTH_ARRAYS_BITS;

            backpropagationNeuralNetwork = new BackpropagationNeuralNetwork.BackpropagationNeuralNetwork(inputNeuron, hiddenNeuron, outputNeuron, true);
            int    maxEpochs = Convert.ToInt32(numMaxEpochs.Value);
            double learnRate = Convert.ToDouble(numLearningRate.Value);
            double momentum  = Convert.ToDouble(numMomentum.Value);

            Console.WriteLine("=============================== You can see these logs in the console ===============================");

            Console.WriteLine("Preparing to load data from memory");
            List <double[]> dataTraining = new List <double[]>();

            // Each training row is the Hu-moment feature vector followed by the
            // binary class-encoding bits expected by Train().
            for (int i = 0; i < data.Count; i++)
            {
                double[] dataRows = new double[inputNeuron + Constants.LENGTH_ARRAYS_BITS];
                for (int j = 0; j < data[i].MomentHu.Length; j++)
                {
                    dataRows[j] = data[i].MomentHu[j];
                }
                int k = 0;
                for (int j = data[i].MomentHu.Length; j < dataRows.Length; j++)
                {
                    dataRows[j] = data[i].ArrayBinaryofClass[k];
                    k++;
                }
                dataTraining.Add(dataRows);
            }

            Console.WriteLine("\nSetting maxEpochs = " + maxEpochs);
            Console.WriteLine("Setting learnRate = " + learnRate.ToString("F2"));
            Console.WriteLine("Setting momentum  = " + momentum.ToString("F2"));

            Console.WriteLine("\nStarting training");
            Layers weights = backpropagationNeuralNetwork.Train(dataTraining.ToArray(), maxEpochs, learnRate, momentum);

            Console.WriteLine("Done");
            Console.WriteLine("\nFinal neural network model weights and biases:\n");

            Console.WriteLine("Number of neurons: " + weights.NumNeuron);
            if (checkDisplayWeight.Checked)
            {
                this.Invoke(new MethodInvoker(delegate
                {
                    dgViewWeightResult.ColumnCount = weights.NumNeuron;
                    for (int i = 0; i < weights.NumNeuron; i++)
                    {
                        dgViewWeightResult.Columns[i].Name = "Neuron " + i;
                    }
                    string[] arrays = new string[weights.NumNeuron];
                    for (int i = 0; i < weights.NumNeuron; i++)
                    {
                        arrays[i] = weights.Neurons[i].Value.ToString();
                    }
                    dgViewWeightResult.Rows.Add(arrays);
                }));
            }
        }
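        // Generates numRows synthetic data rows: random inputs in [-10.0, 10.0) are
        // pushed through a network with random weights and biases, and the index of
        // the largest output is recorded as a 1-of-N class label appended to the
        // input features.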
        static double[][] MakeAllData(int numInput, int numHidden,
                                      int numOutput, int numRows, int seed)
        {
            Random rnd        = new Random(seed);
            int    numWeights = (numInput * numHidden) + numHidden +
                                (numHidden * numOutput) + numOutput;
            Layers weights = new Layers(numWeights, nameof(weights)); // actually weights & biases

            for (int i = 0; i < numWeights; ++i)
            {
                weights.Neurons[i].Value = 20.0 * rnd.NextDouble() - 10.0; // [-10.0 to 10.0]
            }
            Console.WriteLine("Generating weights and biases:");
            ShowVector(weights, 2, 10, true);

            double[][] result = new double[numRows][]; // allocate return-result
            for (int i = 0; i < numRows; ++i)
            {
                result[i] = new double[numInput + numOutput]; // features followed by 1-of-N outputs
            }
            BackpropagationNeuralNetwork.BackpropagationNeuralNetwork gnn =
                new BackpropagationNeuralNetwork.BackpropagationNeuralNetwork(numInput, numHidden, numOutput); // generating NN
            gnn.SetWeights(weights);

            for (int r = 0; r < numRows; ++r) // for each row
            {
                // generate random inputs
                Layers inputs = new Layers(numInput, nameof(inputs));
                for (int i = 0; i < numInput; ++i)
                {
                    inputs.Neurons[i].Value = 20.0 * rnd.NextDouble() - 10.0; // [-10.0 to 10.0]
                }
                // compute outputs
                Layers outputs = gnn.ComputeOutputs(inputs);

                // translate outputs to 1-of-N
                double[] oneOfN = new double[numOutput]; // all 0.0

                int    maxIndex = 0;
                double maxValue = outputs.Neurons[0].Value;
                for (int i = 0; i < numOutput; ++i)
                {
                    if (outputs.Neurons[i].Value > maxValue)
                    {
                        maxIndex = i;
                        maxValue = outputs.Neurons[i].Value;
                    }
                }
                oneOfN[maxIndex] = 1.0;

                // place inputs and 1-of-N output values into curr row
                int c = 0;                         // column into result[][]
                for (int i = 0; i < numInput; ++i) // inputs
                {
                    result[r][c++] = inputs.Neurons[i].Value;
                }
                for (int i = 0; i < numOutput; ++i) // outputs
                {
                    result[r][c++] = oneOfN[i];
                }
            } // each row
            return result;
        }     // MakeAllData