Example no. 1
        public static void Play(Board b)
        {
            Board  b2 = GoDiePointers.DeepClone(b);
            Player PW = new Player(true); NeuralNet NNW = new NeuralNet(PW, 3, 10);

            Data.ReadNs(NNW);
            Player PB = new Player(false); NeuralNet NNB = new NeuralNet(PB, 3, 10);

            Data.ReadNs(NNB);
            foreach (Piece p in b2.Pieces)
            {
                if (p is Empty)
                {
                    continue;
                }
                if (p.Player.IsW == true)
                {
                    p.Player = PW;
                }
                else
                {
                    p.Player = PB;
                }
            }
            List <Neuron> BestNeurons = GoDiePointers.DeepClone(NNW.Neurons);

            Random random             = new Random();

            //Number of weights to change
            int changeCount = 5;

            for (int j = 0; j < changeCount; j++)
            {
                //For neurons: normally this would adjust the weight up or down by random.Next(x, y) percent;
                //for now it is simply fed into the sigmoid as a randomizing factor
                double randomVal = random.Next(-14, 14);
                //For pieces
                double pieceRVal  = random.Next(1, 19) / 10.00;
                int    randNeuron = random.Next(0, NNW.Neurons.Count);
                //random.Next's upper bound is exclusive, so (1, 3) picks 1 or 2 (white's or black's net)
                int    randthing  = random.Next(1, 3);
                //Board/weight indices run 0-7, so the exclusive upper bound must be 8
                int    X          = random.Next(0, 8);
                int    Y          = random.Next(0, 8);
                try
                {
                    if (randthing == 1)
                    {
                        if (BestNeurons[randNeuron].layer == 0)
                        {
                            NNW.Neurons[randNeuron].weights[X, Y] =
                                Sigmoid.sigmoid(randomVal);
                        }
                        else
                        {
                            KeyValuePair <Neuron, double> kvp = NNW.Neurons[randNeuron].layWeights.ElementAt(random.Next(0, NNW.Neurons[randNeuron].layWeights.Count));
                            NNW.Neurons[randNeuron].layWeights[kvp.Key] =
                                Sigmoid.sigmoid(randomVal);
                        }
                    }
                    if (randthing == 2)
                    {
                        if (BestNeurons[randNeuron].layer == 0)
                        {
                            NNB.Neurons[randNeuron].weights[X, Y] = Sigmoid.sigmoid(randomVal);
                        }
                        else
                        {
                            KeyValuePair <Neuron, double> kvp = NNB.Neurons[randNeuron].layWeights.ElementAt(random.Next(0, NNB.Neurons[randNeuron].layWeights.Count));
                            NNB.Neurons[randNeuron].layWeights[kvp.Key] =
                                Sigmoid.sigmoid(randomVal);
                        }
                    }

                    /*
                     * Disabled for now
                     * also, it has a 50% chance of selecting the empty squares with the current x/y randomizer
                     * //Changing class values?
                     * if (randthing == 3)
                     * {
                     *  b.Pieces[X, Y].CVal = (int)(pieceRVal * (GoDiePointers.DeepClone(b.Pieces[X, Y].CVal)));
                     * }
                     * //Repeat to equalize chances of neuron vs piece
                     * if (randthing == 4)
                     * {
                     *  b2.Pieces[Y, X].CVal = (int)(pieceRVal * (GoDiePointers.DeepClone(b.Pieces[X, Y].CVal)));
                     * }
                     */
                }
                catch (Exception ex) { Console.WriteLine(ex); return; }
            }

            //At the move cap, stop playing and write the higher-scoring side's weights to the weight-list file
            int moveCap = 100;
            int i       = 1;

            //While it has not moved too many times, and while no-one has won, play
            //Run in parallel?

            //Using two boards to allow for different piece cvals, unless I want to put that into the NN class?
            while (i <= moveCap && !b.WWin && !b.BWin && !b2.WWin && !b2.BWin && !b.Stale && !b2.Stale)
            {
                if (b.WTurn)
                {
                    b2.Pieces = NNW.Move(b, true).Pieces;
                    Board.PrintBoard(b2);
                    b2.WTurn = false;
                    i++;
                }
                if (!b2.WTurn)
                {
                    b.Pieces = NNB.Move(b2, false).Pieces;
                    Board.PrintBoard(b);
                    b.WTurn = true;
                    i++;
                }
                else
                {
                    Console.WriteLine("NN Failure");
                    break;
                }
            }
            //Will need to check whether pieces read/write properly in the future

            //If white won, write white's data
            if (b.WWin || b2.WWin) /*Data.WritePieces(b);*/ Data {
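
Example 1 (and the initialization code below) feeds randomized integers through Sigmoid.sigmoid, but the helper itself is not part of these snippets. A minimal sketch, assuming the standard logistic function 1 / (1 + e^-x); the actual Sigmoid class in this codebase may differ:

    using System;

    public static class Sigmoid
    {
        //Assumed implementation: the standard logistic function, mapping any input into (0, 1)
        public static double sigmoid(double x)
        {
            return 1.0 / (1.0 + Math.Exp(-x));
        }
    }
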
Example no. 2
 public void initNN()
 {
     try
     {
         //Randomize the weights
         Random r = new Random();
         for (int i = 0; i <= depth; i++)
         {
             if (i == 0)
             {
                 for (int ii = 0; ii <= count - 1; ii++)
                 {
                     //For each cell in the 8x8 layer-1 weight array, assign a randomized sigmoid value
                     double[,] temps = new double[8, 8];
                     for (int x = 0; x < 8; x++)
                     {
                         for (int y = 0; y < 8; y++)
                         {
                             temps[x, y] = Sigmoid.sigmoid(r.Next(-9, 9));
                         }
                     }
                     Neuron n = new Neuron(this, temps, 0, 0);
                 }
             }
             if (i >= 1 && i <= depth - 1)
             {
                 for (int ii = 0; ii <= count - 1; ii++)
                 {
                     Neuron n = new Neuron(this, new Dictionary <Neuron, double>(), 0, i);
                     n.layWeights.Clear();
                     foreach (Neuron neu in Neurons)
                     {
                         if (neu.layer == n.layer - 1)
                         {
                             if (!n.layWeights.ContainsKey(neu))
                             {
                                 //Make a connection of random weight to each neuron one layer lower
                                 //(divide by 100.0 so the division is floating-point and the sigmoid input is fractional)
                                 n.layWeights.Add(neu, /* Weight for the neuron */ Sigmoid.sigmoid(r.Next(0, 999) / 100.0));
                             }
                         }
                     }
                 }
             }
             if (i == depth)
             {
                 //Make the final neuron (output)
                 Neuron n = new Neuron(this, new Dictionary <Neuron, double>(), 0, i);
                 n.layWeights.Clear();
                 foreach (Neuron neu in Neurons)
                 {
                     if (neu.layer == n.layer - 1)
                     {
                         if (!n.layWeights.ContainsKey(neu))
                         {
                             //Make a connection of random weight to each neuron one layer lower
                             //(divide by 100.0 so the division is floating-point and the sigmoid input is fractional)
                             n.layWeights.Add(neu, /* Weight for the neuron */ Sigmoid.sigmoid(r.Next(0, 999) / 100.0));
                         }
                     }
                 }
                 //Output of the NN is this neuron
                 Output = n;
             }
         }
     }
     //If it fails, print the error out
     catch (Exception ex) { Console.WriteLine(ex); }
 }
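
initNN relies on a handful of Neuron and NeuralNet members whose definitions are not included in these snippets: layer, weights, layWeights, Neurons and Output. The sketch below is a hypothetical reconstruction of just those members, inferred from how they are used above (for example, the layer-0 loop discards its local Neuron yet later iterates Neurons, so the constructor presumably registers each neuron with its network); the actual classes in the codebase may differ:

    using System.Collections.Generic;

    //Hypothetical sketch: only the members initNN touches, not the full classes.
    public class NeuralNet
    {
        public List<Neuron> Neurons = new List<Neuron>();
        public Neuron Output;       //set by initNN to the single top-layer neuron
        public int depth;           //index of the output layer (hidden layers are 1..depth-1)
        public int count;           //neurons per layer
    }

    public class Neuron
    {
        public int layer;                             //0 = board-facing input layer
        public double[,] weights;                     //8x8 per-square weights, used only by layer 0
        public Dictionary<Neuron, double> layWeights; //weighted links to neurons one layer lower

        //Layer-0 constructor; registration with the network is assumed, since the
        //examples discard the local variable but iterate net.Neurons afterwards
        public Neuron(NeuralNet net, double[,] weights, double value, int layer)
        {
            this.weights = weights;
            this.layer = layer;
            layWeights = new Dictionary<Neuron, double>();
            net.Neurons.Add(this);
        }

        //Hidden/output-layer constructor
        public Neuron(NeuralNet net, Dictionary<Neuron, double> layWeights, double value, int layer)
        {
            this.layWeights = layWeights;
            this.layer = layer;
            net.Neurons.Add(this);
        }
    }
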
Example no. 3
 public void initNN()
 {
     try
     {
         Random r = new Random();
         for (int i = 0; i <= depth; i++)
         {
             if (i == 0)
             {
                 for (int ii = 0; ii <= count - 1; ii++)
                 {
                     //For each cell in the 8x8 layer-1 weight array, assign a randomized sigmoid value
                     double[,] temps = new double[8, 8];
                     for (int x = 0; x < 8; x++)
                     {
                         for (int y = 0; y < 8; y++)
                         {
                             temps[x, y] = Sigmoid.sigmoid(r.Next(-9, 9));
                         }
                     }
                     Neuron n = new Neuron(this, temps, 0, 0);
                 }
             }
             if (i >= 1 && i <= depth - 1)
             {
                 for (int ii = 0; ii <= count - 1; ii++)
                 {
                     Neuron n = new Neuron(this, new Dictionary <Neuron, double>(), 0, i);
                     n.layWeights.Clear();
                     foreach (Neuron neu in Neurons)
                     {
                         if (neu.layer == n.layer - 1)
                         {
                             if (!n.layWeights.ContainsKey(neu))
                             {
                                 //Make a connection of random weight to each neuron one layer lower
                                 //(divide by 100.0 so the division is floating-point and the sigmoid input is fractional)
                                 n.layWeights.Add(neu, /* Weight for the neuron */ Sigmoid.sigmoid(r.Next(0, 999) / 100.0));
                             }
                         }
                     }
                 }
             }
             if (i == depth)
             {
                 Neuron n = new Neuron(this, new Dictionary <Neuron, double>(), 0, i);
                 n.layWeights.Clear();
                 foreach (Neuron neu in Neurons)
                 {
                     if (neu.layer == n.layer - 1)
                     {
                         if (!n.layWeights.ContainsKey(neu))
                         {
                             //Make a connection of random weight to each neuron one layer lower
                             //(divide by 100.0 so the division is floating-point and the sigmoid input is fractional)
                             n.layWeights.Add(neu, /* Weight for the neuron */ Sigmoid.sigmoid(r.Next(0, 999) / 100.0));
                         }
                     }
                 }
                 Output = n;
             }
         }
     }
     //If initialization fails, retry it (note: this recurses without bound if the failure persists)
     catch { initNN(); }
 }
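
Example 3 is the same method as Example 2 except for its catch clause: instead of logging the exception it calls initNN() again, which recurses without bound if the failure keeps occurring. One alternative is to retry inside a loop with a hard cap. The sketch below shows the control flow only: the weight-randomization body from the examples above is elided, and the attempt limit is an arbitrary choice, not something taken from the original code:

    //Sketch of a bounded, loop-based retry replacing the recursive catch in Example 3,
    //so that a persistent failure cannot overflow the stack
    public void initNN()
    {
        const int maxAttempts = 5;
        for (int attempt = 1; attempt <= maxAttempts; attempt++)
        {
            try
            {
                //... weight-randomization body from Examples 2/3 goes here ...
                return;   //success: stop retrying
            }
            catch (Exception ex)
            {
                Console.WriteLine($"initNN attempt {attempt} failed: {ex}");
            }
        }
    }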