Example #1
        public static void Run()
        {
            NeuralNet Winner = Neuralator.Neuralate();

            Console.WriteLine("Winner!");
            Console.WriteLine("Score out of 256: {0}", Winner.TotalScore);
            Console.WriteLine("Confidence: {0}", Winner.TotalAvgConfidence);

            Console.WriteLine("Press any key to continue...");
            Console.ReadKey();
        }
Example #2
        internal NeuralLayer(NeuralNet ParentNeuralNet, UInt16 LayerIndex, uint NumberOfNeuronsInLayer)
        {
            this.LayerIndex      = LayerIndex;
            this.ParentNeuralNet = ParentNeuralNet;

            NeuronsInLayer = new List<Neuron>();

            // Pack the layer index into the high 16 bits of each neuron's unique ID
            UInt32 UniqueID = ((UInt32)LayerIndex << 16);

            for (UInt16 iter = 0; iter < NumberOfNeuronsInLayer; iter += 1)
            {
                NeuronsInLayer.Add(new Neuron(this, (UniqueID | iter)));
            }
        }
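Each neuron's unique ID therefore carries the layer index in its high 16 bits and the neuron's position within the layer in the low 16 bits. A minimal sketch of unpacking such an ID (the DecodeNeuronID helper below is hypothetical, not part of the library):

        internal static (UInt16 Layer, UInt16 Neuron) DecodeNeuronID(UInt32 UniqueID)
        {
            // High 16 bits: layer index; low 16 bits: neuron index within that layer
            return ((UInt16)(UniqueID >> 16), (UInt16)(UniqueID & 0xFFFF));
        }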
Example #3
        internal static void DisplayGenerationStats(int Generation, NeuralNet BestInGeneration, TimeSpan PropogationTime, TimeSpan NonPropogationTime)
        {
            // iter++ builds the sequential composite-format placeholders {0}..{9} used below
            int iter = 0;

            Console.WriteLine("Time:                 {" + iter++ + "}\n" +
                              "Propogation Time:     {" + iter++ + "}\n" +
                              "Meddling Time:        {" + iter++ + "}\n" +
                              "Generation:           {" + iter++ + "}\n" +
                              "Top Score:            {" + iter++ + "} / {" + iter++ + "}\n" +
                              "Sum Of Custom Error:  {" + iter++ + "}\n" +
                              "Sum Of Roots Error:   {" + iter++ + "}\n" +
                              "Sum Of Squares Error: {" + iter++ + "}\n" +
                              "Gens Since Imprv:     {" + iter++ + "}\n" +
                              "\n",
                              TotalTimer.Elapsed,
                              PropogationTime,
                              NonPropogationTime,
                              Generation,
                              BestInGeneration.TotalScore, NumberOfTrainingDatumToUse,
                              BestInGeneration.SumOfCustomError,
                              BestInGeneration.SumOfRootsError,
                              BestInGeneration.SumOfSquaresError,
                              GensSinceLastImprv);
        }
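The iter++ concatenation only generates sequential placeholders; the same lines can also be written with string interpolation. A minimal sketch using the member names above (a rewrite for illustration, not the original code):

            Console.WriteLine($"Generation:       {Generation}\n" +
                              $"Top Score:        {BestInGeneration.TotalScore} / {NumberOfTrainingDatumToUse}\n" +
                              $"Gens Since Imprv: {GensSinceLastImprv}\n");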
Example #4
        internal static NeuralNet Breed(NeuralNet Father, NeuralNet Mother)
        {
            NeuralNet Child = new NeuralNet(NeuralLayerInfo);

            for (UInt16 LayerIter = 0; LayerIter < NeuralLayerInfo.Count(); LayerIter += 1)
            {
                NeuralLayer CurrentLayer = Child.NeuralLayers[LayerIter];

                for (int NeuronIter = 0; NeuronIter < CurrentLayer.NeuronsInLayer.Count(); NeuronIter += 1)
                {
                    Neuron CurrentNeuron = CurrentLayer.NeuronsInLayer[NeuronIter];

                    for (int DendriteIter = 0; DendriteIter < CurrentNeuron.OutputConnections.Count(); DendriteIter += 1)
                    {
                        Dendrite CurrentDendrite = CurrentNeuron.OutputConnections[DendriteIter];

                        // Mutation chance grows with each generation without improvement, capped at MaxMutationRate
                        if (GlobalRandom.NextDouble() < Math.Min(MaxMutationRate, (BaseMutationRate + (MutationRateIncreasePerFailedGeneration * GensSinceLastImprv))))
                        {
                            CurrentDendrite.SetNewRandomConnectionStrength();
                        }
                        else
                        {
                            switch (BreedingType)
                            {
                            // Children are always random
                            case EnumBreedingType.AlwaysRandom:

                                CurrentDendrite.SetNewRandomConnectionStrength();

                                break;


                            // Child dendrite connection strength is the average of the mother and father
                            case EnumBreedingType.AverageValue:

                                CurrentDendrite.ConnectionStrength =
                                    (Father.NeuralLayers[LayerIter].NeuronsInLayer[NeuronIter].OutputConnections[DendriteIter].ConnectionStrength +
                                     Mother.NeuralLayers[LayerIter].NeuronsInLayer[NeuronIter].OutputConnections[DendriteIter].ConnectionStrength) / 2;

                                break;


                            // Child takes the father's or mother's dendrite connection strength (also the default)
                            default:
                            case EnumBreedingType.Human:

                                CurrentDendrite.ConnectionStrength = (GlobalRandom.NextDouble() < 0.5)
                                    ? Father.NeuralLayers[LayerIter].NeuronsInLayer[NeuronIter].OutputConnections[DendriteIter].ConnectionStrength
                                    : Mother.NeuralLayers[LayerIter].NeuronsInLayer[NeuronIter].OutputConnections[DendriteIter].ConnectionStrength;

                                break;


                            // Child dendrite connection strength pulled up and down by agreement between father and mother
                            case EnumBreedingType.WeightedPull:

                                if (Father.NeuralLayers[LayerIter].NeuronsInLayer[NeuronIter].OutputConnections[DendriteIter].ConnectionStrength >= 0.5 &&
                                    Mother.NeuralLayers[LayerIter].NeuronsInLayer[NeuronIter].OutputConnections[DendriteIter].ConnectionStrength >= 0.5)
                                {
                                    CurrentDendrite.ConnectionStrength = (float)Math.Min(1.0, Math.Max(
                                                                                             Father.NeuralLayers[LayerIter].NeuronsInLayer[NeuronIter].OutputConnections[DendriteIter].ConnectionStrength,
                                                                                             Mother.NeuralLayers[LayerIter].NeuronsInLayer[NeuronIter].OutputConnections[DendriteIter].ConnectionStrength
                                                                                             ) + 0.01);
                                }
                                else
                                if (Father.NeuralLayers[LayerIter].NeuronsInLayer[NeuronIter].OutputConnections[DendriteIter].ConnectionStrength < 0.5 &&
                                    Mother.NeuralLayers[LayerIter].NeuronsInLayer[NeuronIter].OutputConnections[DendriteIter].ConnectionStrength < 0.5)
                                {
                                    CurrentDendrite.ConnectionStrength = (float)Math.Max(0, Math.Min(
                                                                                             Father.NeuralLayers[LayerIter].NeuronsInLayer[NeuronIter].OutputConnections[DendriteIter].ConnectionStrength,
                                                                                             Mother.NeuralLayers[LayerIter].NeuronsInLayer[NeuronIter].OutputConnections[DendriteIter].ConnectionStrength
                                                                                             ) - 0.01);
                                }
                                else
                                {
                                    CurrentDendrite.ConnectionStrength = (GlobalRandom.NextDouble() < 0.5)
                                            ? Father.NeuralLayers[LayerIter].NeuronsInLayer[NeuronIter].OutputConnections[DendriteIter].ConnectionStrength
                                            : Mother.NeuralLayers[LayerIter].NeuronsInLayer[NeuronIter].OutputConnections[DendriteIter].ConnectionStrength;
                                }

                                break;
                            }
                        }
                    }
                }
            }

            return(Child);
        }
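Breed mutates a dendrite with a probability that rises the longer the population has gone without improving. A minimal sketch of that rate calculation pulled out as a helper (the EffectiveMutationRate name is hypothetical):

        internal static double EffectiveMutationRate()
        {
            // Base rate plus a per-failed-generation increase, capped at MaxMutationRate
            return Math.Min(MaxMutationRate,
                            BaseMutationRate + (MutationRateIncreasePerFailedGeneration * GensSinceLastImprv));
        }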
Example #5
        internal static NeuralNet Neuralate()
        {
            TrainingManager.PopulateTrainingSet();

            // Create the initial random population
            for (int iter = 0; iter < NetsPerGeneration; iter += 1)
            {
                GenerationMembers.Add(new NeuralNet(NeuralLayerInfo));
            }

            // run all generations
            TotalTimer.Start();
            PortionTimer.Start();
            for (int Generation = 0; Generation < NumberOfGenerations; Generation += 1)
            {
                PortionTimer.Restart();
                // Run all members of the generation over the training set and score them
                Parallel.ForEach(GenerationMembers, Member =>
                {
                    Member.ClearScore();

                    // Loop over all inputs for the current Member and score the total
                    for (int input = 0; input < NumberOfTrainingDatumToUse; input += 1)
                    {
                        // Fill the net with the input data for this training point
                        Member.ClearNetworkState();
                        Member.SetInputsToFirstLayer(TrainingManager.TrainingSet[input]);

                        Member.Propogate();
                        Member.GetConsensus();

                        var act   = Actual(Member.OutputValue);
                        var exp   = TrainingManager.TrainingSet[input].OutputValue;
                        var Error = Math.Abs(act - exp);

                        // Count a hit when the error is within the acceptable resolution
                        if (Error < Resolution)
                        {
                            Member.TotalScore += 1;
                        }
                        else
                        {
                            Member.SumOfSquaresError += Math.Pow(Error, 2);
                            Member.SumOfRootsError   += Math.Pow(Error, 0.5);
                            Member.SumOfCustomError  += (Error + 10 * Math.Sqrt(Error));
                        }

                        Member.TotalAvgConfidence += Member.Confidence;
                    }

                    //Member.TotalAvgConfidence = Math.Max(Member.TotalAvgConfidence, 0.01f);

                    Member.TotalAvgConfidence /= NumberOfTrainingDatumToUse;

                    //Member.SumOfSquaresError /= Member.TotalAvgConfidence;
                    //Member.SumOfRootsError   /= Member.TotalAvgConfidence;
                    //Member.SumOfCustomError  /= Member.TotalAvgConfidence;

                    switch (ErrorSelector)
                    {
                    case ErrorSelectorType.SumOfSquares:
                        Member.TotalError = Member.SumOfSquaresError;
                        break;

                    case ErrorSelectorType.SumOfRoots:
                        Member.TotalError = Member.SumOfRootsError;
                        break;

                    case ErrorSelectorType.SumOfCustom:
                        Member.TotalError = Member.SumOfCustomError;
                        break;

                    default:
                        Member.TotalError = Member.SumOfSquaresError;
                        break;
                    }
                });

                var PropogationTime = PortionTimer.Elapsed;
                PortionTimer.Restart();

                GenerationMembers.Sort();

                // No improvement over the previous generation's best (by score or by error, depending on the sort mode)
                if (SortByScore ? (LastGenTopScore >= GenerationMembers.First().TotalScore) : (LastGenLeastError <= GenerationMembers.First().TotalError))
                {
                    GensSinceLastImprv += 1;

                    //if (SortByScore)
                    //{
                    //    if (GensSinceLastImprv >= 200)
                    //    {
                    //        return GenerationMembers[0];
                    //    }
                    //}
                    //if (ErrorSelector == ErrorSelectorType.SumOfSquares)
                    //{
                    //    if(GensSinceLastImprv == 100)
                    //    {
                    //        ErrorSelector = ErrorSelectorType.SumOfRoots;
                    //        LastGenTopScore = GenerationMembers.First().TotalScore;
                    //        LastGenLeastError = GenerationMembers.First().TotalError;
                    //    }
                    //}
                    //else
                    //{
                    //    if (GensSinceLastImprv >= 200)
                    //    {
                    //        SortByScore = true;
                    //    }
                    //}
                }
                else
                {
                    GensSinceLastImprv = 0;
                    LastGenTopScore    = GenerationMembers.First().TotalScore;
                    LastGenLeastError  = GenerationMembers.First().TotalError;
                }

                NeuralNet BestInGeneration = GenerationMembers[0];

                // Periodically cull the population when progress has stalled
                if ((GensSinceLastImprv == 20) || ((GensSinceLastImprv + 1) % 100 == 0))
                {
                    Cull();
                }

                BreedNextGeneration();

                DisplayGenerationStats(Generation, BestInGeneration, PropogationTime, PortionTimer.Elapsed);
            }
            TotalTimer.Stop();

            NeuralNet Winner = GenerationMembers[0];

            foreach (var Member in GenerationMembers)
            {
                if (Member.TotalScore > Winner.TotalScore)
                {
                    Winner = Member;
                }
            }

            return(Winner);
        }
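Inside the parallel loop, each training sample counts as a hit when its absolute error falls below Resolution; otherwise several error measures are accumulated for ranking. A minimal sketch of that per-sample rule pulled out as a helper (the ScoreSample name and signature are hypothetical):

        internal static void ScoreSample(NeuralNet Member, double ActualValue, double ExpectedValue)
        {
            double Error = Math.Abs(ActualValue - ExpectedValue);

            if (Error < Resolution)
            {
                // Within tolerance: count the sample as correct
                Member.TotalScore += 1;
            }
            else
            {
                // Outside tolerance: accumulate the error measures used to rank the generation
                Member.SumOfSquaresError += Math.Pow(Error, 2);
                Member.SumOfRootsError   += Math.Pow(Error, 0.5);
                Member.SumOfCustomError  += Error + 10 * Math.Sqrt(Error);
            }
        }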