Пример #1
0
        /// <summary>
        /// Builds a LeNet-style ConvNet for a binarized, labelled image dataset.
        /// Two modes: CIFAR-10-style binary batches (MNIST == false) or MNIST idx files (MNIST == true).
        /// Architecture in both modes: Input -> 2x Conv(lReLu) -> 2x Hidden(Sigmoid) -> SoftMax output,
        /// trained with SGD and cross-entropy cost.
        /// </summary>
        /// <param name="InputFileName">Path of the first data batch to load (CIFAR mode only; MNIST mode uses _ListOfFilenames).</param>
        /// <param name="_MiniBatchSize">Number of images per minibatch.</param>
        /// <param name="_ListOfFilenames">All data-file locations; in CIFAR mode index 5 is the test batch (see TestNetwork).</param>
        /// <param name="MNIST">True to load MNIST idx files, false for CIFAR-style binary batches.</param>
        public Network(string InputFileName, int _MiniBatchSize, string[] _ListOfFilenames, bool MNIST)//ConvNet with binarized, labelled image-dataset
        {
            L2_reg  = 0.01;// L2 regularization strength, passed to every trainable layer below
            IsMNIST = MNIST;
            if (!MNIST)
            {
                NoOfImages      = 10000;//images per file (data_batch_1..data_batch_5 and test_batch each hold 10000 images)
                MiniBatchSize   = _MiniBatchSize;
                ListOfFilenames = _ListOfFilenames;
                CostFunction    = CostFunction.CrossEntropy;
                Bl     = new ImageLoader.BinLoader(InputFileName, MiniBatchSize);
                Labels = Bl.GetLabels(0);// one-hot labels of minibatch 0; column count defines the output-layer size
                Add(new InputLayer(Bl.GetZ_3D(0), MiniBatchSize));
                //Alternative (long-trained) architecture kept for reference: Sigmoid convolutions trained with Adam.
                //Add(new ConvolutionalLayer(this[Count - 1].Z_3D, 5, 2, 10, MiniBatchSize, ActivationFunction.Sigmoid, WeightInitialization.NormalizedGaussianRandom, TrainAlgorithm.Adam, 0.00001 * MiniBatchSize, 0.01 / MiniBatchSize, 1));
                //Add(new ConvolutionalLayer(this[Count - 1].Z_3D, 5, 3, 16, MiniBatchSize, ActivationFunction.Sigmoid, WeightInitialization.NormalizedGaussianRandom, TrainAlgorithm.Adam, 0.00001 * MiniBatchSize, 0.01 / MiniBatchSize, 1));
                //Add(new ConvolutionalLayer(this[Count - 1].Z_3D, 3, 1, 16, MiniBatchSize, ActivationFunction.Sigmoid, WeightInitialization.NormalizedGaussianRandom, TrainAlgorithm.Adam, 0.00001 * MiniBatchSize, 0.01 / MiniBatchSize, 1));
                //Add(new HiddenLayer(this[Count - 1].Z, 120, MiniBatchSize, ActivationFunction.Sigmoid, WeightInitialization.Random, TrainAlgorithm.Adam, 0.00001 * MiniBatchSize, 0.1 / MiniBatchSize, 1));
                //Add(new HiddenLayer(this[Count - 1].Z, 84, MiniBatchSize, ActivationFunction.Sigmoid, WeightInitialization.Random, TrainAlgorithm.Adam, 0.00001 * MiniBatchSize, 0.1 / MiniBatchSize, 1));
                //Add(new OutputLayer(this[Count - 1].Z, Labels.GetLength(1), MiniBatchSize, ActivationFunction.SoftMax, WeightInitialization.Random, TrainAlgorithm.Adam, 0.00001 * MiniBatchSize, 0.01 / MiniBatchSize, 1));

                // Active architecture: learning rate scales with minibatch size (0.00001 * MiniBatchSize).
                Add(new ConvolutionalLayer(this[Count - 1].Z_3D, 3, 1, 10, MiniBatchSize, ActivationFunction.lReLu, WeightInitialization.NormalizedGaussianRandom, TrainAlgorithm.SGD, 0.00001 * MiniBatchSize, L2_reg, 1));
                Add(new ConvolutionalLayer(this[Count - 1].Z_3D, 5, 2, 24, MiniBatchSize, ActivationFunction.lReLu, WeightInitialization.NormalizedGaussianRandom, TrainAlgorithm.SGD, 0.00001 * MiniBatchSize, L2_reg, 1));
                Add(new HiddenLayer(this[Count - 1].Z, 120, MiniBatchSize, ActivationFunction.Sigmoid, WeightInitialization.Random, TrainAlgorithm.SGD, 0.00001 * MiniBatchSize, L2_reg, 1));
                Add(new HiddenLayer(this[Count - 1].Z, 84, MiniBatchSize, ActivationFunction.Sigmoid, WeightInitialization.Random, TrainAlgorithm.SGD, 0.00001 * MiniBatchSize, L2_reg, 1));
                Add(new OutputLayer(this[Count - 1].Z, Labels.GetLength(1), MiniBatchSize, ActivationFunction.SoftMax, WeightInitialization.Random, TrainAlgorithm.SGD, 0.00001 * MiniBatchSize, L2_reg, 1));
            }
            else//if MNIST-Data
            {
                ListOfFilenames = _ListOfFilenames;
                NoOfImages      = 60000;//training-set size; NOTE(review): original comment named t10k-images.idx3-ubyte, but that file is the 10k TEST set — 60000 matches train-images.idx3-ubyte; confirm which file is loaded for training
                MiniBatchSize   = _MiniBatchSize;
                CostFunction    = CostFunction.CrossEntropy;
                MNIST_Load      = new ImageLoader.MNIST_Loader(ListOfFilenames, MiniBatchSize, true);// true presumably selects the training set — see TestNetwork, which reloads with false
                Labels          = MNIST_Load.GetLabels(0);// one-hot labels of minibatch 0; column count defines the output-layer size
                Add(new InputLayer(MNIST_Load.GetZ_3D(0), MiniBatchSize));
                // Same topology as the CIFAR branch but fewer feature maps (6/16) and a 10x larger learning rate.
                Add(new ConvolutionalLayer(this[Count - 1].Z_3D, 3, 1, 6, MiniBatchSize, ActivationFunction.lReLu, WeightInitialization.NormalizedGaussianRandom, TrainAlgorithm.SGD, 0.0001 * MiniBatchSize, L2_reg, 1));
                Add(new ConvolutionalLayer(this[Count - 1].Z_3D, 3, 2, 16, MiniBatchSize, ActivationFunction.lReLu, WeightInitialization.NormalizedGaussianRandom, TrainAlgorithm.SGD, 0.0001 * MiniBatchSize, L2_reg, 1));
                Add(new HiddenLayer(this[Count - 1].Z, 120, MiniBatchSize, ActivationFunction.Sigmoid, WeightInitialization.Random, TrainAlgorithm.SGD, 0.0001 * MiniBatchSize, L2_reg, 1));
                Add(new HiddenLayer(this[Count - 1].Z, 84, MiniBatchSize, ActivationFunction.Sigmoid, WeightInitialization.Random, TrainAlgorithm.SGD, 0.0001 * MiniBatchSize, L2_reg, 1));
                Add(new OutputLayer(this[Count - 1].Z, Labels.GetLength(1), MiniBatchSize, ActivationFunction.SoftMax, WeightInitialization.Random, TrainAlgorithm.SGD, 0.0001 * MiniBatchSize, L2_reg, 1));
            }
        }
Пример #2
0
 /// <summary>
 /// Evaluates the network on the 10000-image test set and reports running accuracy
 /// via OnNetAccuracyEvent after every minibatch. L2 regularization is switched off
 /// (L2_reg_Temp = 0) for the duration of the test and restored at the end.
 /// CIFAR mode reads ListOfFilenames[5] (the test batch) and afterwards reloads the
 /// current training batch; MNIST mode reloads its loader with the test flag and
 /// afterwards reloads the training images.
 /// </summary>
 public void TestNetwork()
 {
     L2_reg_Temp = 0;// disable regularization while testing; restored below
     int correct_Estimations = 0;
     if (!IsMNIST)
     {
         //load test images (ListOfFilenames[5] are test images)
         Bl = new ImageLoader.BinLoader(ListOfFilenames[5], MiniBatchSize);
         for (int b = 0; b < 10000 / MiniBatchSize; b++)//10000 num of Test Pictures
         {
             Labels       = Bl.GetLabels(b);
             this[0].Z_3D = Bl.GetZ_3D(b);
             Activate();
             correct_Estimations += CountCorrectEstimations();
             OnNetAccuracyEvent((double)correct_Estimations / ((double)((b + 1) * MiniBatchSize)));
         }
         //restore the training batch that Train() expects to find loaded
         Bl = new ImageLoader.BinLoader(ListOfFilenames[EpocheMiniBatchCounter], MiniBatchSize);
     }
     else
     {
         //load test images (third argument false presumably selects the test set — mirrors the true used for training)
         MNIST_Load = new ImageLoader.MNIST_Loader(ListOfFilenames, MiniBatchSize, false);
         for (int b = 0; b < 10000 / MiniBatchSize; b++)//10000 num of Test Pictures
         {
             Labels       = MNIST_Load.GetLabels(b);
             this[0].Z_3D = MNIST_Load.GetZ_3D(b);
             Activate();
             correct_Estimations += CountCorrectEstimations();
             OnNetAccuracyEvent((double)correct_Estimations / ((double)((b + 1) * MiniBatchSize)));
         }
         //load again TrainImages
         MNIST_Load = new ImageLoader.MNIST_Loader(ListOfFilenames, MiniBatchSize, true);
     }
     L2_reg_Temp = L2_reg;
 }

 /// <summary>
 /// Counts how many rows of the current output-layer activations (this[Count - 1].Z,
 /// one row per minibatch image) have their argmax at the same index as the argmax of
 /// the corresponding one-hot row in Labels. Assumes Activate() has just been called.
 /// </summary>
 /// <returns>Number of correctly classified images in the current minibatch.</returns>
 private int CountCorrectEstimations()
 {
     var output  = this[this.Count - 1].Z;// this[Count - 1] = OutputLayer; dimension 0 is the minibatch, dimension 1 the labels
     int correct = 0;
     for (int i = 0; i < output.GetLength(0); i++)
     {
         int    indexOfEstimatedNumber = 0;
         double maxOutputVal           = 0;// SoftMax outputs are positive, so 0 is a safe floor
         int    indexOfCorrectLabel    = 0;
         int    maxLabelVal            = 0;
         //get index of output-value with highest probability, and index of the hot label
         for (int j = 0; j < output.GetLength(1); j++)
         {
             if (output[i, j] > maxOutputVal)
             {
                 maxOutputVal           = output[i, j];
                 indexOfEstimatedNumber = j;
             }
             if (Labels[i, j] > maxLabelVal)
             {
                 maxLabelVal         = (int)Labels[i, j];
                 indexOfCorrectLabel = j;
             }
         }
         if (indexOfEstimatedNumber == indexOfCorrectLabel)
         {
             correct++;
         }
     }
     return correct;
 }
Пример #3
0
        /// <summary>
        /// Runs one training step (one minibatch): forward pass, backpropagation, weight update.
        /// Every (NoOfImages / MiniBatchSize * how_Mane_Epoches_per_Minibatch) steps it reports the
        /// averaged error via OnBadgeErrorEvent, saves the weights and runs TestNetwork().
        /// The minibatch is advanced every how_Mane_Epoches_per_Minibatch steps; CIFAR mode
        /// additionally cycles through training files 0..4 via EpocheMiniBatchCounter.
        /// </summary>
        /// <returns>The backpropagation error of this step.</returns>
        public double Train()
        {
            int stepsPerPass = NoOfImages / MiniBatchSize * how_Mane_Epoches_per_Minibatch;// steps until the whole data file has been seen
            if (epoche % stepsPerPass == 0 && epoche > 0)
            {
                if (!IsMNIST)
                {
                    //ListOfFilenames[0] to ListOfFilenames[4] are training images, ListOfFilenames[5] are test images
                    if (EpocheMiniBatchCounter == 5)
                    {
                        EpocheMiniBatchCounter = 0;
                    }
                    //the next training file (ListOfFilenames[EpocheMiniBatchCounter]) is loaded at the end of TestNetwork()
                    EpocheMiniBatchCounter++;
                }
                BatchCounter++;
                OnBadgeErrorEvent(accumulatedError / stepsPerPass);
                accumulatedError = 0;
                OnSaveWeightsEvent();
                TestNetwork();
            }
            //change MiniBatch every "how_Mane_Epoches_per_Minibatch" epoches
            if (epoche % how_Mane_Epoches_per_Minibatch == 0)
            {
                //minibatch index within the currently loaded file (BatchCounter full passes already consumed)
                int miniBatchIndex = (epoche - BatchCounter * NoOfImages / MiniBatchSize * how_Mane_Epoches_per_Minibatch) / how_Mane_Epoches_per_Minibatch;
                if (!IsMNIST)
                {
                    if (Bl != null)
                    {
                        Labels       = Bl.GetLabels(miniBatchIndex);
                        this[0].Z_3D = Bl.GetZ_3D(miniBatchIndex);
                    }
                }
                else if (MNIST_Load != null)
                {
                    Labels       = MNIST_Load.GetLabels(miniBatchIndex);
                    this[0].Z_3D = MNIST_Load.GetZ_3D(miniBatchIndex);
                }
            }
            //forward pass, backpropagation and weight update for the current minibatch
            Activate();
            double error = BackProp();
            UpdateWeights();
            epoche++;
            accumulatedError += error;
            return(error);
        }