Example #1
        public void ClickFunction(object sender, EventArgs e)
        {
            DateTime startTime = DateTime.Now;
            String   reply     = funkyTextBox.Text;

            // TEST MAYBE IMAGES
            reply = "Maybe/Image" + reply + ".png";
            try
            {
                NeuralNet.TestData(reply, NeuralNet.testLayer, 1 - NeuralNet.highestDifferenceWhenRight);
            }
            catch (Exception z)
            {
                NeuralNet.DebugWrite("Main->Testing() ->", "Error while reading image: " + z.Message, false);
            }
            //END MODULAR TEST SECTION
            Console.WriteLine("Done, it took " + (DateTime.Now.Subtract(startTime)));
            this.Invalidate();
        }
Example #2
        /// <summary>
        /// CreateSubCell uses <see cref="Bitmap"/> functions to segregate the root image and fill the
        /// 'subCells' array with the relevant subsections of the root image.
        /// </summary>
        /// <param name="i">The X co-ordinate of the subsection to create</param>
        /// <param name="f">The Y co-ordinate of the subsection to create</param>
        /// <returns>The created sub-cell made from the specified sub-image of the root image</returns>
        public override Cell CreateSubCell(int i, int f)
        {
            Bitmap subImage = new Bitmap(img.Width / 2, img.Height / 2);

            NeuralNet.DebugWrite("ImageCell->Cell()->", String.Format("Image Width/Height is {0},{1}", img.Width, img.Height), false);
            // Create new subImage
            for (int z = 0; z < subImage.Width; z++)
            {
                for (int x = 0; x < subImage.Height; x++)
                {
                    subImage.SetPixel(z, x, img.GetPixel((img.Width / 2 * i) + z, (img.Height / 2 * f) + x));
                }
            }
            if (subCells[i, f] != null)
            {
                ImageCell.ChangeImage((ImageCell)subCells[i, f], subImage);
            }
            else
            {
                subCells[i, f] = new ImageCell(subImage);
            }
            return(subCells[i, f]);
        }
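A minimal usage sketch of the method above, not taken from the original project: assuming the root ImageCell's subCells array is 2x2, calling CreateSubCell for every (i, f) pair produces the four quadrants of the root image.

        // Hypothetical usage sketch (assumes a 2x2 subCells array); not part of the original code.
        ImageCell root = new ImageCell(new Bitmap(64, 64));
        for (int i = 0; i < 2; i++)
        {
            for (int f = 0; f < 2; f++)
            {
                Cell quadrant = root.CreateSubCell(i, f);   // each call covers one quadrant of the root image
            }
        }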
Example #3
 public void LoadFunction(object sender, EventArgs e)
 {
     NeuralNet.RunNeuralNet();
 }
Example #4
        static void Main(string[] args)
        {
            //float[,] _inp = {
            //    { 0, 0 },
            //    { 0, 1 },
            //    { 1, 1 },
            //    { 1, 0 },
            //};

            //float[,] _ans =
            //{
            //    { 0 },
            //    { 1 },
            //    { 0 },
            //    { 1 }
            //};

            Console.WriteLine("Loading Network data...");

            string[] _sInp = File.ReadAllLines(@"C:\Users\Emil\Downloads\mnist-in-csv\mnist_train.csv");

            string[][] s = _sInp.Select(x => x.Split(new[] { ',' }, StringSplitOptions.RemoveEmptyEntries)).ToArray();

            var b = s.Select(x => new
            {
                Answer = int.Parse(x[0]),
                Inputs = x.Skip(1).ToArray()
            }).ToArray();

            float[,] _inp = new float[b.GetLength(0), b[0].Inputs.Length];
            float[,] _ans = new float[b.GetLength(0), 10];

            for (int i = 0; i < _inp.GetLength(0); i++)
            {
                _ans[i, b[i].Answer] = 0.99f;

                for (int k = 0; k < _inp.GetLength(1); k++)
                {
                    _inp[i, k] = float.Parse(b[i].Inputs[k]) / 255;
                }
            }

            NeuralNet a = new NeuralNet(784, 100, 10)
            {
                BatchSize = 200,
                LearningRate = 0.3f,
                ShuffleData = true,
            };

            a.SetMonitor(DisplayNetwork);
            a.Training(_inp, _ans, 18);

            Console.WriteLine("Guessing...");

            Console.WriteLine(a.Guess(a.SliceArr(_inp, 5)));
            Console.WriteLine(new Matrix(1, 10, a.SliceArr(_ans, 5)));
            Console.WriteLine(a.Guess(a.SliceArr(_inp, 1)));
            Console.WriteLine(new Matrix(1, 10, a.SliceArr(_ans, 1)));
            Console.WriteLine(a.Guess(a.SliceArr(_inp, 2)));
            Console.WriteLine(new Matrix(1, 10, a.SliceArr(_ans, 2)));

            Console.ReadKey();
        }
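For reference, the CSV handling above can be read as one helper: each line of mnist_train.csv holds a digit label followed by 784 pixel values, the label column becomes a one-hot target row marked with 0.99, and the pixels are scaled from 0-255 into [0, 1]. The sketch below mirrors that logic; the helper name and tuple return are illustrative and assume System, System.IO and System.Linq are imported.

        // Sketch only: the same parsing, one-hot encoding and normalization as in Main above.
        static (float[,] inputs, float[,] targets) LoadMnistCsv(string path)
        {
            string[][] rows = File.ReadAllLines(path)
                .Select(line => line.Split(new[] { ',' }, StringSplitOptions.RemoveEmptyEntries))
                .ToArray();

            var inputs = new float[rows.Length, rows[0].Length - 1];
            var targets = new float[rows.Length, 10];   // one column per digit 0-9

            for (int i = 0; i < rows.Length; i++)
            {
                // First column is the digit label; mark it with 0.99 in the one-hot target row.
                targets[i, int.Parse(rows[i][0])] = 0.99f;

                // Remaining columns are pixel intensities 0-255, scaled to [0, 1].
                for (int k = 1; k < rows[i].Length; k++)
                {
                    inputs[i, k - 1] = float.Parse(rows[i][k]) / 255f;
                }
            }
            return (inputs, targets);
        }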
Example #5
 public ImageCell(Bitmap subsection) : base()
 {
     img = subsection;
     NeuralNet.DebugWrite("ImageCell->Cell()->", String.Format("Image Width/Height is {0},{1}", img.Width, img.Height), false); // Confirm Iamge is real and halfing each repitition
     BuildValuesNew();
 }
Example #6
        private void TeachXor()
        {
            var population = new List<NetHolder>();
            var genes = new List<Genome>();
            var populationCount = 100;
            var numberOfGenerations = 5000;
            var trainingSet = 1000;
            var initialFitness = 0;
            for (int i = 0; i < populationCount; i++)
            {
                var neuralNet = new NeuralNet(2, 1, 2, 6);
                var genome = new Genome(neuralNet.GetTotalInputs());
                genome.Fitness = initialFitness; // Set initial fitness
                var holder = new NetHolder() {Genome = genome, NeuralNet = neuralNet};
                this.SetWeights(holder);
                population.Add(holder);
                genes.Add(genome);
            }

            var algorithm = new GeneticAlgorithm(genes, 0.1, 0.7, 0.3, (int) (populationCount * 0.1));
            var random = new Random();
            var inputHolders = new List<InputHolder>();
            var maxValue = 1;
            trainingSet = 4;
            inputHolders.Add(new InputHolder() {X = 1, Y = 1});
            inputHolders.Add(new InputHolder() {X = 1, Y = 0});
            inputHolders.Add(new InputHolder() {X = 0, Y = 1});
            inputHolders.Add(new InputHolder() {X = 0, Y = 0});

            for (int i = 0; i < numberOfGenerations; i++)
            {
                Console.WriteLine("Generation {0}", i);
                int generationCorrect = 0;
                double maxError = double.MinValue;
                double minError = double.MaxValue;
                double averageError = 0;
                int errorIteration = 0;

                double minFitness = double.MaxValue;
                double maxFitness = double.MinValue;
                double averageFitness = 0;
                int fitnessIteration = 0;
                foreach (var netHolder in population)
                {

                    netHolder.Genome.Fitness = initialFitness;
                    int totalCorrect = 0;
                    foreach (var input in inputHolders)
                    {
                        var list =
                            netHolder.NeuralNet.Update(new List<double>() {input.X, input.Y});
                        var output = list[0];

                        var expectedOutput = (int) input.X ^ (int) input.Y;
                        var error = Math.Abs(output - expectedOutput);

                        averageError = (averageError * errorIteration + error) /
                                       ++errorIteration;

                        if (netHolder.MaxError > error)
                        {
                            netHolder.MaxError = error;
                        }
                        lock (this)
                        {
                            if (error > maxError)
                            {
                                maxError = error;
                            }
                            if (error < minError)
                            {
                                minError = error;
                            }
                        }
                        var bonus = 100.0;
                        var coef = 1.0;
                        if (error > 0)
                        {
                            coef = (maxValue - error) / maxValue;
                        }
                        else
                        {
                            totalCorrect++;
                        }

                        bonus = bonus * coef;

                        double fitnessPenalty = bonus / trainingSet;

                        netHolder.Genome.Fitness += fitnessPenalty;
                        /* Console.WriteLine("{0} -> output: {1}, expected: {2}, error: {3}", j, output, expectedOutput,
                           Math.Round(error, 2)); */
                    }

                    //netHolder.Genome.Fitness += totalCorrect;
                    generationCorrect += totalCorrect;

                    //Console.WriteLine("{0} -> Fitness: {1}", j, population[j].Genome.Fitness);
                    lock (this)
                    {
                        if (netHolder.Genome.Fitness > maxFitness)
                        {
                            maxFitness = netHolder.Genome.Fitness;
                        }
                        if (netHolder.Genome.Fitness < minFitness)
                        {
                            minFitness = netHolder.Genome.Fitness;
                        }
                        averageFitness = (averageFitness * fitnessIteration +
                                          netHolder.Genome.Fitness) /
                                         ++fitnessIteration;
                    }
                }

                if (generationCorrect / populationCount == trainingSet)
                {
                    Console.WriteLine("Found solution");
                    for (int j = 0; j < 1000; j++)
                    {
                        int x = random.Next(2);
                        int y = random.Next(2);
                        var z = population[0].NeuralNet.Update(new List<double>() {x, y})[0];
                        if ((x ^ y) != z)
                        {
                            // A breakpoint can be set here to inspect mismatches.
                        }
                    }
                }

                Console.WriteLine("Fitness: min: {0} max: {1}, avg: {2}", Math.Round(minFitness, 2),
                                  Math.Round(maxFitness, 2), Math.Round(averageFitness, 2));
                Console.WriteLine("Error: min: {0} max: {1}, avg: {2}, corr: {3}", Math.Round(minError, 2),
                                  Math.Round(maxError, 2), Math.Round(averageError, 2), generationCorrect);
                var newPopulation = algorithm.Epoch();
                for (int j = 0; j < newPopulation.Count; j++)
                {
                    population[j].Genome = newPopulation[j];
                }
                foreach (var netHolder in population)
                {
                    this.SetWeights(netHolder);
                }
            }

            NetHolder best = population[0];
            foreach (var netHolder in population)
            {
                if (netHolder.Genome.Fitness > best.Genome.Fitness)
                {
                    best = netHolder;
                }
            }

            best.NeuralNet.Update(0, 0);
        }
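The per-sample fitness update inside the loop above reduces to one formula: a bonus of 100 is scaled by (maxValue - error) / maxValue when the output is wrong, left at 100 when it is exactly right, and divided by the number of training pairs before being added to Genome.Fitness. A sketch of that arithmetic as a standalone function, with an illustrative name that is not part of the original project:

        // Sketch: the per-sample fitness contribution used in TeachXor (maxValue = 1, trainingSet = 4 above).
        static double FitnessContribution(double output, int expected, double maxValue, int trainingSet)
        {
            double error = Math.Abs(output - expected);
            double coef = error > 0 ? (maxValue - error) / maxValue : 1.0;
            return 100.0 * coef / trainingSet;   // added to Genome.Fitness for every training pair
        }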
Example #7
        private void TeachSum()
        {
            var population = new List<NetHolder>();
            var genes = new List<Genome>();
            var populationCount = 100;
            var numberOfGenerations = 200;
            var trainingSet = 100;
            var initialFitness = 0;
            for (int i = 0; i < populationCount; i++)
            {
                var neuralNet = new NeuralNet(2, 1, 3, 6);
                var genome = new Genome(neuralNet.GetTotalInputs());
                genome.Fitness = initialFitness; // Set initial fitness
                var holder = new NetHolder(){Genome = genome, NeuralNet = neuralNet};
                this.SetWeights(holder);
                population.Add(holder);
                genes.Add(genome);
            }

            NetHolder bestEver = new NetHolder()
                                     {
                                         Genome = new Genome(new List<double>()) {Fitness = double.MinValue},
                                         NeuralNet = new NeuralNet(2, 1, 3, 6)
                                     };

            var algorithm = new GeneticAlgorithm(genes, 0.1, 0.7, 0.3, (int) (populationCount * 0.1));
            var random = new Random();
            var inputHolders = new List<InputHolder>();
            var maxNumber = 5;
            var maxValue = maxNumber * 2;

            inputHolders.Add(new InputHolder() {X = 0, Y = 1});
            inputHolders.Add(new InputHolder() {X = 5, Y = 5});

            trainingSet = inputHolders.Count;

            for (int i = 0; i < numberOfGenerations; i++)
            {
                Console.WriteLine("Generation {0}", i);
                int generationCorrect = 0;
                double maxError = double.MinValue;
                double minError = double.MaxValue;
                double averageError = 0;
                int errorIteration = 0;

                double minFitness = double.MaxValue;
                double maxFitness = double.MinValue;
                double averageFitness = 0;
                int fitnessIteration = 0;
                foreach (var netHolder in population)
                {

                    averageError = TrainNetwork(initialFitness, inputHolders, maxValue, netHolder,
                                                averageError, errorIteration, ref maxError, ref minError,
                                                ref generationCorrect, ref maxFitness, ref minFitness,
                                                ref averageFitness, fitnessIteration);
                }

                foreach (var holder in population)
                {
                    if (holder.MaxError < maxError)
                    {
                        var errorBonus = (maxError - holder.MaxError) * 10;
                        holder.Genome.Fitness += errorBonus;
                    }
                    if (holder.Genome.Fitness > bestEver.Genome.Fitness)
                    {
                        bestEver.Genome = holder.Genome;
                        bestEver.Genome.Fitness = holder.Genome.Fitness;
                        bestEver.MaxError = holder.MaxError;
                        this.SetWeights(bestEver);
                    }
                }

                Console.WriteLine("Fitness: min: {0} max: {1}, avg: {2}", Math.Round(minFitness, 2),
                                  Math.Round(maxFitness, 2), Math.Round(averageFitness, 2));
                Console.WriteLine("Error: min: {0} max: {1}, avg: {2}, corr: {3}", Math.Round(minError, 2),
                                  Math.Round(maxError, 2),  Math.Round(averageError, 2), generationCorrect);
                Console.WriteLine("Best: fitness {0}, max error {1}%", Math.Round(bestEver.Genome.Fitness, 2),
                                  Math.Round(bestEver.MaxError / maxValue * 100, 2));

                var newPopulation = algorithm.Epoch();
                for (int j = 0; j < newPopulation.Count; j++)
                {
                    population[j].Genome = newPopulation[j];
                    population[j].MaxError = 0;
                }
                foreach (var netHolder in population)
                {
                    this.SetWeights(netHolder);
                }
            }

            Console.WriteLine("Completed. Testing best");
            for (int i = 0; i < 10; i++)
            {
                double avg = 0d;
                int iter = 0;
                double maxError = 0;
                double minError = 0;
                int generationCorrect = 0;
                double maxFitness = 0;
                double minFitness = 0;
                double averageFitness = 0;
                int fitnessIteration = 0;
                this.TrainNetwork(initialFitness, inputHolders, maxValue, bestEver, avg, iter, ref maxError, ref minError,
                                  ref generationCorrect, ref maxFitness, ref minFitness, ref averageFitness,
                                  fitnessIteration);
                var x = random.Next(maxNumber);
                var y = random.Next(maxNumber);
                var z = bestEver.NeuralNet.Update(new List<double> {x, y})[0] * maxValue;
                Console.WriteLine("{0} + {1} = {2}", x, y, Math.Round(z, 2));
            }
        }
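One detail of the test loop above worth spelling out: the network's single output lies in [0, 1], so it is multiplied by maxValue (maxNumber * 2, i.e. 10 here) to recover the predicted sum. A minimal sketch of that decoding step, with illustrative names not taken from the original project:

        // Sketch: feed two addends to the trained net and scale its normalized output back to a sum.
        static double DecodeSum(NeuralNet net, double x, double y, double maxValue)
        {
            return net.Update(new List<double> { x, y })[0] * maxValue;
        }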