Example #1
        public virtual ISignalArray activateNetwork(double[] sensors)
        {
            Brain.ResetState();

            // Convert the sensors into an input array for the network
            for (int i = 0; i < sensors.Length; i++)
            {
                Brain.InputSignalArray[i] = sensors[i];
            }

            // Activate the network
            Brain.Activate();

            return(Brain.OutputSignalArray);
        }
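A minimal calling sketch for the method above, under stated assumptions: agent stands for an instance of the class that defines activateNetwork (its name is not shown in the snippet), and the sensor values are made up for illustration. Note that the snippet itself confirms the returned ISignalArray can be indexed, since GetMove below reads Brain.OutputSignalArray by index.

        // Hypothetical usage: "agent" and the sensor values are illustrative only.
        double[] sensors = { 0.25, 0.5, 0.75 };               // must match the network's input count
        ISignalArray outputs = agent.activateNetwork(sensors);
        double firstOutput = outputs[0];                      // read the first network output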
        /// <summary>
        /// Gets the next move as dictated by the neural network.
        /// </summary>
        public Move GetMove(SquareTypes[,] board)
        {
            // Clear the network
            Brain.ResetState();

            // Convert the game board into an input array for the network
            setInputSignalArray(Brain.InputSignalArray, board);

            // Activate the network
            Brain.Activate();

            // Find the highest-scoring available move
            Move   move = null;
            double max  = double.MinValue;

            for (int i = 0; i < 3; i++)
            {
                for (int j = 0; j < 3; j++)
                {
                    // If the square is taken, skip it.
                    if (board[i, j] != SquareTypes.N)
                    {
                        continue;
                    }

                    // Set the score for this square.
                    double score = Brain.OutputSignalArray[i * 3 + j];

                    // If this is the first available move we've found,
                    // set it to the current best.
                    if (move == null)
                    {
                        move = new Move(i, j);
                        max  = score;
                    }
                    // If this square has a higher score than any we've
                    // found, set it to the current best.
                    else if (max < score)
                    {
                        move.X = i;
                        move.Y = j;
                        max    = score;
                    }
                }
            }

            return(move);
        }
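A usage sketch for GetMove, assuming only what the snippet shows: SquareTypes.N marks an empty square and Move carries X/Y board coordinates. The player variable is a stand-in for an instance of the defining class and is not from the source.

        // Hypothetical driver: "player" is an instance of the class defining GetMove above.
        SquareTypes[,] board = new SquareTypes[3, 3];
        for (int i = 0; i < 3; i++)
        {
            for (int j = 0; j < 3; j++)
            {
                board[i, j] = SquareTypes.N;                  // start with an empty board
            }
        }

        Move move = player.GetMove(board);                    // null only if no square is free
        if (move != null)
        {
            Console.WriteLine($"Network chooses square ({move.X}, {move.Y})");
        }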
Example #3
        public void Move(bool canMove)
        {
            // Squash the proximity flags into network-friendly values:
            // 0.7 when the condition holds, 0.3 otherwise.
            var foodNear   = FoodNear ? 0.7 : 0.3;
            var agentsNear = AgentsNear ? 0.7 : 0.3;
            var nearWalls  = (DistanceToNearestVertical < 15 || DistanceToNearestHorizontal < 15) ? 0.7 : 0.3;

            // Feed the eight sensor values to the network.
            var result     = Brain.Activate(new List<double> {
                nearWalls, Angle, DistanceToNearestFood, foodNear, AngleToFood, DistanceToNearestAgent, AngleToAgent, agentsNear
            });

            // The first output is applied as a change in heading, wrapped modulo 360.
            Angle += result[0];
            Angle  = Angle % 360;

            if (canMove)
            {
                base.Move();
            }
        }
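For context, a minimal sketch of the contract this Move override appears to rely on; the interface name IAgentBrain and the exact signature are assumptions inferred from how Brain.Activate is called above, not a confirmed API.

        // Hypothetical stand-in for the Brain used in Move(): it takes the eight
        // sensor values in the order assembled above and returns the network
        // outputs, where element 0 is read as a change in heading.
        // (List<T> and IList<T> live in System.Collections.Generic.)
        public interface IAgentBrain
        {
            IList<double> Activate(List<double> inputs);
        }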
Example #4
        public bool Accept(LinkedList<StateActionReward> memory)
        {
            var last = memory.Last.Value;

            // The network expects the state, the action, and a single reward input.
            Debug.Assert(last.State.Length + last.Action.Length + 1 == Brain.InputCount);

            // Copy the state into the first inputs...
            for (int i = 0; i < last.State.Length; i++)
            {
                Brain.InputSignalArray[i] = last.State[i];
            }

            // ...followed by the action...
            for (int i = 0; i < last.Action.Length; i++)
            {
                Brain.InputSignalArray[i + last.State.Length] = last.Action[i];
            }

            // ...and finally the reward, rescaled and clamped into [0, 1].
            Brain.InputSignalArray[last.State.Length + last.Action.Length] =
                Math.Min(1, Math.Max(0, (RewardNormalizer + last.Reward) / (2 * RewardNormalizer)));

            // Activate the network
            Brain.Activate();

            // Accept the memory when the first output clears the threshold.
            return(Brain.OutputSignalArray[0] >= AcceptThreshold);
        }
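The final input compresses the raw reward into [0, 1]: a reward of -RewardNormalizer maps to 0, a reward of 0 maps to 0.5, and +RewardNormalizer maps to 1, with anything beyond that range clamped. Pulled out into a helper purely for illustration (the name NormalizeReward is not from the source):

        // Hypothetical helper isolating the reward scaling used above: a linear map
        // from [-RewardNormalizer, +RewardNormalizer] onto [0, 1], clamped at the ends.
        private double NormalizeReward(double reward)
        {
            return Math.Min(1, Math.Max(0, (RewardNormalizer + reward) / (2 * RewardNormalizer)));
        }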