Example no. 1: mini-batch stochastic gradient descent on the MNIST training set
        //Stochastic Gradient Descent algorithm
        public static void StochasticGradient(int batchsize, Brain brain)
        {
            //Copy the training set into a working list
            MNISTReader.DigitImage        image;
            List <MNISTReader.DigitImage> samples = TrainingImages.ToList();

            //Shuffle it
            Shuffle(samples);
            //One epoch: process the whole shuffled training set, one mini-batch at a time
            var total = samples.Count;

            while (samples.Count != 0)
            {
                Console.WriteLine("Percent Learning Sample: " + String.Format("{0:F5}", (total - samples.Count) / (1.0 * total)));
                //Process one mini-batch of "batchsize" samples
                for (int i = 0; i < batchsize; i++)
                {
                    //For each sample
                    if (samples.Count > 0)
                    {
                        image = samples[0];
                        samples.RemoveAt(0);
                        Vector arrayvector = ConvertImageToVector(image);
                        //Input to the network
                        brain.SetInitialActivation(arrayvector);
                        //Forward propagation
                        brain.FeedForward();
                        //Backpropagation against a one-hot target vector:
                        //1 at the index of the correct digit, 0 elsewhere
                        Vector result = new Vector(10);
                        result.data[image.label] = 1;
                        brain.BackPropagation(result);
                    }
                }
                //Update the weights at the end of each mini-batch
                brain.Update(batchsize);
            }
        }
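
A possible way to invoke the routine is sketched below. The Brain constructor, the layer sizes (784 input pixels, 30 hidden neurons, 10 output digits) and the number of epochs are assumptions, since they do not appear in the listing; each call performs one full pass over the shuffled training set.

        //Sketch of a calling sequence; constructor and sizes are hypothetical
        public static void TrainExample()
        {
            Brain brain = new Brain(784, 30, 10);   //hypothetical constructor
            for (int epoch = 0; epoch < 30; epoch++)
            {
                //Each call shuffles the training set and performs one epoch
                //of mini-batch SGD with mini-batches of 10 samples
                StochasticGradient(10, brain);
            }
        }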