Example #1
        //Pushes the better networks further down the array
        private static int Partition(NN[] array, int min, int max)
        {
            Random r     = new Random();
            NN     pivot = array[(min + (max - min) / 2)];
            int    i     = min - 1;
            int    j     = max + 1;

            do
            {
                //Randomly decide whether the pivot plays white for this comparison
                bool isW = r.Next(0, 2) == 1;
                //While pivot wins to array[i]
                do
                {
                    i++;
                } while (i < j && (pivot.SetColor(isW)).Vs(array[i].SetColor(!isW)));
                //And loses to array[j]
                do
                {
                    j--;
                } while (j > i && !(pivot.SetColor(isW)).Vs(array[j].SetColor(!isW)));
                if (i >= j)
                {
                    return(j);
                }
                Swap(array, i, j);
            } while (true);
        }
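Partition follows the Hoare scheme, but the comparison is stochastic: each probe plays a game through Vs with randomly assigned colors instead of comparing stored scores. A quicksort driver on top of it could look like the sketch below; the SortPopulation name and the recursion bounds are assumptions, not part of the shown code, and termination relies on Partition behaving as a regular Hoare partition.

        //Hypothetical driver that ranks the population using the Partition method above
        private static void SortPopulation(NN[] array, int min, int max)
        {
            if (min >= max)
            {
                return;   //Empty or single-element range: nothing to rank
            }
            int p = Partition(array, min, max);   //Hoare-style split around the pivot network
            SortPopulation(array, min, p);        //Rank the lower part
            SortPopulation(array, p + 1, max);    //Rank the upper part
        }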
Example #2
        public NN Crossover(NN parent1, NN parent2)
        {
            var r     = new Random();
            NN  child = new NN();

            child.Init();
            for (int i = 0; i < parent1.Layers.Count; i++)
            {
                //Pick a random crossover point within this layer's weight matrix
                int crossoverPointX = r.Next(0, parent1.Layers[i].Weights.GetLength(0));
                int crossoverPointY = r.Next(0, parent1.Layers[i].Weights.GetLength(1));
                for (int ii = 0; ii < parent1.Layers[i].Weights.GetLength(0); ii++)
                {
                    for (int iii = 0; iii < parent1.Layers[i].Weights.GetLength(1); iii++)
                    {
                        //Take the gene from parent1, switching to parent2 past both crossover points
                        double gene = parent1.Layers[i].Weights[ii, iii];
                        if (ii > crossoverPointX && iii > crossoverPointY)
                        {
                            gene = parent2.Layers[i].Weights[ii, iii];
                        }
                        child.Layers[i].Weights[ii, iii] = gene;
                    }
                }
            }
            return(child);
        }
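The condition ii > crossoverPointX && iii > crossoverPointY hands only the lower-right block of each weight matrix to parent2; every other cell comes from parent1, and the biases are not exchanged at all. Below is a standalone illustration of that selection rule on plain 2D arrays; the BlockCrossover name is made up for this sketch.

        //Illustration of the block selection rule used in Crossover above (not part of the source)
        private static double[,] BlockCrossover(double[,] a, double[,] b, int px, int py)
        {
            var child = new double[a.GetLength(0), a.GetLength(1)];
            for (int i = 0; i < a.GetLength(0); i++)
            {
                for (int j = 0; j < a.GetLength(1); j++)
                {
                    //Only cells strictly past both crossover points come from the second parent
                    child[i, j] = (i > px && j > py) ? b[i, j] : a[i, j];
                }
            }
            return child;
        }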
Example #3
        public NN[] Load()
        {
            var nns = new NN[PopSize];

            for (int i = 0; i < PopSize; i++)
            {
                nns[i] = IO.Read(i);
            }
            return(nns);
        }
Example #4
        public NN[] GeneratePopulation(int popsize)
        {
            var nns = new NN[popsize];

            for (int i = 0; i < popsize; i++)
            {
                var nn = new NN();
                nn.Init();
                nns[i] = nn;
            }
            Save(nns);
            return(nns);
        }
Example #5
        public NN Mutation(NN patient, double probability)
        {
            var r = new Random();

            //Only mutate with the given probability; otherwise return the patient unchanged
            if (r.NextDouble() >= probability)
            {
                return(patient);
            }
            int mutationLayer  = r.Next(0, patient.Layers.Count);
            int mutationPointX = r.Next(0, patient.Layers[mutationLayer].Weights.GetLength(0));
            int mutationPointY = r.Next(0, patient.Layers[mutationLayer].Weights.GetLength(1));

            //"Flip the bit" so to speak to mutate
            patient.Layers[mutationLayer].Weights[mutationPointX, mutationPointY]
                = 1d - patient.Layers[mutationLayer].Weights[mutationPointX, mutationPointY];
            return(patient);
        }
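Crossover and Mutation are typically chained when breeding the next generation from a ranked population. A possible driver is sketched below; the BreedNextGeneration name, the parent pool, and the assumption that the stronger networks sit in the upper half of the array (as the comment on Partition suggests) are not part of the shown code.

        //Hypothetical breeding step: cross pairs of survivors, then mutate each child
        public NN[] BreedNextGeneration(NN[] ranked, double mutationProbability)
        {
            var r    = new Random();
            var next = new NN[ranked.Length];
            int half = ranked.Length / 2;

            for (int i = 0; i < ranked.Length; i++)
            {
                //Draw both parents from the upper half, assumed to hold the stronger networks
                NN parent1 = ranked[r.Next(half, ranked.Length)];
                NN parent2 = ranked[r.Next(half, ranked.Length)];
                next[i]    = Mutation(Crossover(parent1, parent2), mutationProbability);
            }
            return next;
        }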
Example #6
        public bool Vs(NN competitor)
        {
            Board competition = new Board(new Player(true), new Player(false), new Piece[8, 8], true).initBoard();
            int   movecount   = 0;

            //Compete until a victor is decided or the move limit is reached
            while (!competition.WWin && !competition.BWin && movecount < MaxMoves)
            {
                //Make sure the board still exists before playing on it
                if (competition is null)
                {
                    break;
                }

                if (player.IsW == competition.WTurn)
                {
                    competition = Move(competition);
                }
                else
                {
                    competition = competitor.Move(competition);
                }
                ActiveForm.ActiveBoard = Serializer.DeepClone(competition);
                movecount++;
            }
            //Award the win to the victor
            if ((player.IsW && competition.WWin) || (!player.IsW && competition.BWin))
            {
                return(true);
            }
            if ((!player.IsW && competition.WWin) || (player.IsW && competition.BWin))
            {
                return(false);
            }
            //If the move limit was reached, give the win to the NN with more pieces left
            if (movecount >= MaxMoves)
            {
                if (Score(competition, false) > 0)
                {
                    return(true);
                }
                return(false);
            }
            //The loop ended without a result, so the board is in an unknown state
            throw new Exception("Unknown board state");
        }
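Vs plays out a full game from the perspective of the calling network's player, so a direct head-to-head between two networks only needs the color assignment already seen in Partition. A minimal helper, assuming SetColor returns the network itself as it does there:

        //Hypothetical helper: one game between two networks with an explicit color assignment
        public static bool HeadToHead(NN white, NN black)
        {
            //True means the white network won, or outscored its opponent when the move limit was hit
            return white.SetColor(true).Vs(black.SetColor(false));
        }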
Example #7
        public static void Write(NN nn, int num)
        {
            FileStream   fs = new FileStream(BasePath + "\\" + num.ToString() + ".txt", FileMode.Create, FileAccess.Write, FileShare.None);
            StreamWriter sw = new StreamWriter(fs);

            //Space-separated format: layer count, then for each layer its length and input length, then each neuron's weights followed by its bias
            sw.Write(nn.NumLayers + " ");
            foreach (Layer l in nn.Layers)
            {
                sw.Write(l.Length + " " + l.InputLength + " ");
                for (int i = 0; i < l.Length; i++)
                {
                    for (int ii = 0; ii < l.InputLength; ii++)
                    {
                        sw.Write(l.Weights[i, ii] + " ");
                    }
                    sw.Write(l.Biases[i] + " ");
                }
            }
            sw.Close();
            fs.Close();
        }
Example #8
        public static NN Read(int num)
        {
            NN nn = new NN();

            if (Running)
            {
                throw new Exception("Already accessing file");
            }
            Running = true;
            FileStream   fs   = new FileStream(BasePath + "\\" + num.ToString() + ".txt", FileMode.Open, FileAccess.Read, FileShare.None);
            StreamReader sr   = new StreamReader(fs);
            string       text = sr.ReadToEnd();

            sr.Close();
            fs.Close();
            string[] split = text.Split(' ');

            int numlayers = int.Parse(split[0]);

            nn.Layers = new List<Layer>();

            //Index into the token stream; token 0 was the layer count
            int iterator = 1;

            for (int j = 0; j < numlayers; j++)
            {
                int length      = int.Parse(split[iterator]); iterator++;
                int inputlength = int.Parse(split[iterator]); iterator++;
                nn.Layers.Add(new Layer(length, inputlength));
                for (int i = 0; i < length; i++)
                {
                    for (int ii = 0; ii < inputlength; ii++)
                    {
                        nn.Layers[j].Weights[i, ii] = double.Parse(split[iterator]);
                        iterator++;
                    }
                    nn.Layers[j].Biases[i] = double.Parse(split[iterator]);
                    iterator++;
                }
            }
            Running = false;
            return(nn);
        }
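Read parses exactly the space-separated layout that Write produces, so saving and restoring a network is a straight round trip. A small sketch, assuming Write lives in the same IO class as Read and that slot 0 is free:

        //Hypothetical round trip through the text format used by Write and Read
        public static NN RoundTrip()
        {
            NN original = new NN();
            original.Init();
            IO.Write(original, 0);    //Writes 0.txt under BasePath
            return IO.Read(0);        //Rebuilds the same layer sizes, weights and biases
        }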