Example #1
        //Hoare partition: moves lower-scoring networks toward the front and better ones toward the back
        private static int Partition(NN[] array, int min, int max)
        {
            NN  pivot = array[(min + (max - min) / 2)];
            int i     = min - 1;
            int j     = max + 1;

            do
            {
                //Advance i while array[i] scores below the pivot
                do
                {
                    i++;
                } while (array[i].PercCorrect < pivot.PercCorrect);
                //And retreat j while array[j] scores above the pivot
                do
                {
                    j--;
                } while (array[j].PercCorrect > pivot.PercCorrect);
                if (i >= j)
                {
                    return(j);
                }
                Swap(array, i, j);
            } while (true);
        }
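This Hoare-style partition is normally driven by a recursive quicksort, which leaves the population ordered from worst to best PercCorrect. Below is a minimal driver sketch, assuming the NN type, PercCorrect property and Swap helper used above; the QuickSort wrapper itself is not part of the listing.

        //Driver sketch (not from the listing): sorts the population using the Partition above
        private static void QuickSort(NN[] array, int min, int max)
        {
            if (min >= max)
            {
                return;
            }
            int p = Partition(array, min, max);
            //Hoare partition: recurse into [min, p] and [p + 1, max]
            QuickSort(array, min, p);
            QuickSort(array, p + 1, max);
        }
        //Usage: QuickSort(population, 0, population.Length - 1);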
Example #2
        public NN Mutation(NN patient, double probability)
        {
            var r = new Random();

            if (r.NextDouble() > probability)
            {
                return(patient);
            }
            for (int i = 0; i < NumberOfMutations; i++)
            {
                int mutationLayer  = r.Next(0, patient.Layers.Count);
                int mutationPointX = r.Next(0, patient.Layers[mutationLayer].Biases.GetLength(0));
                //50% chance to mutate a bias, otherwise a weight
                if (r.NextDouble() < .5)
                {
                    //"Flip the bit" so to speak to mutate
                    patient.Layers[mutationLayer].Biases[mutationPointX]
                        = 1d - patient.Layers[mutationLayer].Biases[mutationPointX];
                }
                else
                {
                    int mutationPointY = r.Next(0, patient.Layers[mutationLayer].Weights.GetLength(1));
                    //"Flip the bit" so to speak to mutate
                    patient.Layers[mutationLayer].Weights[mutationPointX, mutationPointY]
                        = 1d - patient.Layers[mutationLayer].Weights[mutationPointX, mutationPointY];
                }
            }
            return(patient);
        }
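Mutation alters the patient network in place and returns the same reference, so it can be applied directly across a whole offspring array. The helper below is only a sketch (MutatePopulation is not part of the listing); note also that Mutation constructs a new Random on every call, so on older .NET runtimes rapid repeated calls can end up sharing the same time-based seed.

        //Sketch (not from the listing): runs the Mutation operator above over an offspring array
        public void MutatePopulation(NN[] offspring, double probability)
        {
            for (int i = 0; i < offspring.Length; i++)
            {
                //Mutation modifies the network in place and returns the same reference
                offspring[i] = Mutation(offspring[i], probability);
            }
        }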
Example #3
        //Builds a child whose genes beyond a random per-layer crossover point come from parent2
        public NN Crossover(NN parent1, NN parent2, bool side)
        {
            var r     = new Random();
            NN  child = new NN();

            child.Init();
            for (int i = 0; i < parent1.Layers.Count; i++)
            {
                int crossoverPointX = r.Next(0, parent1.Layers[i].Weights.GetLength(0));
                int crossoverPointY = r.Next(0, parent1.Layers[i].Weights.GetLength(1));
                for (int ii = 0; ii < parent1.Layers[i].Weights.GetLength(0); ii++)
                {
                    for (int iii = 0; iii < parent1.Layers[i].Weights.GetLength(1); iii++)
                    {
                        double gene = parent1.Layers[i].Weights[ii, iii];
                        //Weights past the crossover point in both dimensions come from parent2
                        if (ii > crossoverPointX && iii > crossoverPointY)
                        {
                            gene = parent2.Layers[i].Weights[ii, iii];
                        }
                        child.Layers[i].Weights[ii, iii] = gene;
                    }
                    double bgene = parent1.Layers[i].Biases[ii];
                    if (ii > crossoverPointX)
                    {
                        bgene = parent2.Layers[i].Biases[ii];
                    }
                    child.Layers[i].Biases[ii] = bgene;
                }
            }
            return(child);
        }
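Crossover and Mutation are typically combined when breeding the next generation. The sketch below is illustrative only: BreedNextGeneration is not part of the listing, and because the side parameter is unused in the Crossover shown above, any value can be passed for it here.

        //Sketch (not from the listing): breeds a new generation from a pool of selected parents
        public NN[] BreedNextGeneration(NN[] parents, double mutationProbability)
        {
            var r       = new Random();
            var nextGen = new NN[parents.Length];

            for (int i = 0; i < parents.Length; i++)
            {
                //Pick two parents at random (a real selection scheme may avoid duplicates)
                NN p1 = parents[r.Next(0, parents.Length)];
                NN p2 = parents[r.Next(0, parents.Length)];
                //Crossover mixes the parents' weights and biases into a fresh child
                NN child = Crossover(p1, p2, i % 2 == 0);
                //Mutation then perturbs a few of the child's weights/biases
                nextGen[i] = Mutation(child, mutationProbability);
            }
            return(nextGen);
        }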
Example #4
        //Reads every saved network in the population back from disk
        public NN[] Load()
        {
            var nns = new NN[PopSize];

            for (int i = 0; i < PopSize; i++)
            {
                nns[i] = IO.Read(i);
            }
            return(nns);
        }
Example #5
 //Creates and initialises a new population of the given size, then saves it
 void GeneratePopulation(int popsize)
 {
     NNs = new NN[popsize];
     for (int i = 0; i < popsize; i++)
     {
         var nn = new NN();
         nn.Init();
         NNs[i] = nn;
     }
     Save();
 }
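GeneratePopulation and Load cover the two ways a run can start: seeding a brand-new population or restoring one saved earlier. Below is a minimal sketch of that choice, assuming NNs, PopSize, Load and GeneratePopulation all live on the same class; the loadExisting flag and the PreparePopulation wrapper are illustrative only.

        //Sketch (not from the listing): restore a saved population when requested, otherwise create one
        public NN[] PreparePopulation(bool loadExisting)
        {
            if (loadExisting)
            {
                NNs = Load();                //reads PopSize networks back via IO.Read
            }
            else
            {
                GeneratePopulation(PopSize); //builds, initialises and saves a fresh population
            }
            return(NNs);
        }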
Example #6
        //Serialises a network to a space-separated text file named "<num>.txt" under BasePath
        public static void Write(NN nn, int num)
        {
            FileStream   fs = new FileStream(BasePath + "\\" + num.ToString() + ".txt", FileMode.Create, FileAccess.Write, FileShare.None);
            StreamWriter sw = new StreamWriter(fs);

            sw.Write(nn.NumLayers + " ");
            foreach (Layer l in nn.Layers)
            {
                sw.Write(l.Length + " " + l.InputLength + " ");
                for (int i = 0; i < l.Length; i++)
                {
                    for (int ii = 0; ii < l.InputLength; ii++)
                    {
                        sw.Write(l.Weights[i, ii] + " ");
                    }
                    sw.Write(l.Biases[i] + " ");
                }
            }
            sw.Close(); fs.Close();
        }
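The resulting file is a single line of space-separated values: the layer count first, then for each layer its Length and InputLength, followed by each neuron's InputLength weights and its bias. For a hypothetical network with one layer of 2 neurons and 3 inputs the file would look like the line below (weight and bias values invented for illustration); the trailing space written after the last value means the Split in Read yields one empty final token, which the parser never touches.

        1 2 3 0.42 -0.17 0.88 0.05 0.31 0.64 -0.29 0.11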
Example #7
        //Rebuilds a network from its text file under BasePath; the Running flag guards concurrent reads
        public static NN Read(int num)
        {
            NN nn = new NN();

            if (Running)
            {
                throw new Exception("Already accessing file");
            }
            Running = true;
            FileStream   fs   = new FileStream(BasePath + "\\" + num.ToString() + ".txt", FileMode.Open, FileAccess.Read, FileShare.None);
            StreamReader sr   = new StreamReader(fs);
            string       text = sr.ReadToEnd();

            sr.Close(); fs.Close();
            string[] split = text.Split(' ');

            int numlayers = int.Parse(split[0]);

            nn.Layers = new List<Layer>();

            int iterator = 1;

            for (int j = 0; j < numlayers; j++)
            {
                int length      = int.Parse(split[iterator]); iterator++;
                int inputlength = int.Parse(split[iterator]); iterator++;
                nn.Layers.Add(new Layer(length, inputlength));
                for (int i = 0; i < length; i++)
                {
                    for (int ii = 0; ii < inputlength; ii++)
                    {
                        nn.Layers[j].Weights[i, ii] = double.Parse(split[iterator]);
                        iterator++;
                    }
                    nn.Layers[j].Biases[i] = double.Parse(split[iterator]);
                    iterator++;
                }
            }
            Running = false;
            return(nn);
        }
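A minimal round trip through these helpers, assuming the IO.Write and IO.Read methods shown above; the RoundTrip wrapper is illustrative only. Note that Read sets the Running guard before parsing but does not reset it if parsing throws, so a failed read leaves the flag stuck at true.

        //Sketch (not from the listing): write a network to disk and read it straight back
        public static NN RoundTrip(NN original, int num)
        {
            IO.Write(original, num);   //serialises to "<num>.txt" under BasePath
            return(IO.Read(num));      //rebuilds the layers, weights and biases from that file
        }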