void Start()
    {
        // Resolve scene-level bounds/spawn data first.
        maxY     = GameController._instance.bgSize.y - 2f;
        spawnPos = GameController._instance.spawnPos;

        // Cache component references for this agent.
        rigidbody2D = GetComponent<Rigidbody2D>();
        anim        = GetComponentInChildren<Animator>();

        // Build the brain: 2 inputs -> one hidden layer of 6 -> 2 outputs,
        // and apply pre-trained weights when they were handed to us.
        nn = new DeepNeuralNetwork(2, new int[] { 6 }, 2);
        if (weights != null)
        {
            nn.SetWeights(weights);
        }
    }
    /// <summary>
    /// Fills this network with uniformly random weights and biases in
    /// [-0.75, 0.75). A single flat 1D array is built first because it makes
    /// storing/serializing the weights easier; SetWeights then distributes the
    /// values into the individual layer arrays.
    /// Order: ihWeights - hhWeights[] - hoWeights - hBiases[] - oBiases
    /// </summary>
    public void InitializeWeights()
    {
        const double lo = -0.75;
        const double hi = +0.75;
        int total = DeepNeuralNetwork.NumWeights(this.nInput, this.nHidden, this.nOutput);

        double[] randomWts = new double[total];
        for (int k = 0; k < randomWts.Length; ++k)
        {
            // Uniform sample in [lo, hi).
            randomWts[k] = lo + (hi - lo) * rnd.NextDouble();
        }

        this.SetWeights(randomWts);
    }
// Beispiel #3 — scraper artifact (example separator plus vote count "0"), not code
 /// <summary>
 /// Lets the user pick a ".nn" network definition file, loads it into
 /// <c>loadedNetwork</c>, and shows the file name plus its layer sizes in the
 /// view model.
 /// </summary>
 private void LoadNetworkButton_Click(object sender, RoutedEventArgs e)
 {
     try
     {
         OpenFileDialog ofd = new OpenFileDialog();
         ofd.DefaultExt = ".nn";
         ofd.Filter     = "Neural Network definition files (.nn)|*.nn";
         if (ofd.ShowDialog().GetValueOrDefault())
         {
             loadedNetwork = new DeepNeuralNetwork(ofd.FileName);

             // Summarize the layer sizes, e.g. "784 30 10 " (trailing space
             // kept so the display format matches "name ( 784 30 10 )").
             string layerString = string.Join(" ", loadedNetwork.GetLayerCounts()) + " ";
             model.LoadedNNName = System.IO.Path.GetFileName(ofd.FileName) + " ( " + layerString + ")";
         }
     }
     catch (Exception ex)
     {
         // Was a bare catch with a generic message: surface the actual
         // failure reason so a bad/corrupt file is diagnosable.
         MessageBox.Show("Error loading file. " + ex.Message);
     }
 }
    } // SetWeights

    /// <summary>
    /// Produces a new flat weight array by crossing over this network's
    /// current weights with <paramref name="wts"/> (gene-by-gene) and randomly
    /// mutating individual genes, then writes the result back into the layer
    /// matrices.
    /// Order: ihweights - hhWeights[] - hoWeights - hBiases[] - oBiases
    /// </summary>
    /// <param name="wts">Partner weight array; must have the same layout/length as this network.</param>
    /// <param name="chance">Percent chance (compared against 1..100) to take a gene from <paramref name="wts"/>.</param>
    /// <param name="mutationRate">Percent chance (compared against 1..100) to mutate a gene instead of crossing over.</param>
    public void updateWeights(double[] wts, double chance, double mutationRate)
    {
        int nw = NumWeights(this.nInput, this.nHidden, this.nOutput);  // total num wts + biases

        // BUG FIX: validate BEFORE the crossover loop. The original only
        // checked wts.Length after indexing wts[i]/weights[i] into a
        // NumWeights-sized buffer, so a wrong-length (or null) input could
        // throw IndexOutOfRange/NullReference before the guard ever ran.
        if (wts == null || wts.Length != nw)
        {
            return;
        }

        double[] weights    = this.GetWeights();
        double[] newWeights = new double[nw];

        for (int i = 0; i < wts.Length; i++)
        {
            if (rnd.Next(1, 101) <= mutationRate)
            {
                // Mutation: five equally likely (20%) cases. newWeights[i]
                // starts at 0, so += / -= effectively assign a signed sample.
                int x = rnd.Next(1, 101);
                if (x <= 20)
                {
                    newWeights[i] = (0.75f - -0.75f) * rnd.NextDouble() + -0.75f;       // fresh value in [-0.75, 0.75)
                }
                else if (x <= 40)
                {
                    newWeights[i] += (0.375f - -0.375f) * rnd.NextDouble() + -0.375f;   // small nudge in [-0.375, 0.375)
                }
                else if (x <= 60)
                {
                    newWeights[i] -= (0.375f - -0.375f) * rnd.NextDouble() + -0.375f;   // small nudge, mirrored sign
                }
                else if (x <= 80)
                {
                    newWeights[i] += (0.75f - -0.75f) * rnd.NextDouble() + -0.75f;      // large nudge in [-0.75, 0.75)
                }
                else
                {
                    newWeights[i] -= (0.75f - -0.75f) * rnd.NextDouble() + -0.75f;      // large nudge, mirrored sign
                }
            }
            else
            {
                // Crossover: take the partner's gene with probability
                // `chance` percent, otherwise keep our current gene.
                if (rnd.Next(1, 101) <= chance)
                {
                    newWeights[i] = wts[i];
                }
                else
                {
                    newWeights[i] = weights[i];
                }
            }
        }

        // Distribute the flat array back into the layer matrices.
        // order: ihweights - hhWeights[] - hoWeights - hBiases[] - oBiases
        int ptr = 0;                                   // pointer into newWeights[]

        for (int i = 0; i < nInput; ++i)               // input node
        {
            for (int j = 0; j < hNodes[0].Length; ++j) // 1st hidden layer nodes
            {
                ihWeights[i][j] = newWeights[ptr++];
            }
        }

        for (int h = 0; h < nLayers - 1; ++h)               // not last h layer
        {
            for (int j = 0; j < nHidden[h]; ++j)            // from node
            {
                for (int jj = 0; jj < nHidden[h + 1]; ++jj) // to node
                {
                    hhWeights[h][j][jj] = newWeights[ptr++];
                }
            }
        }

        int hi = this.nLayers - 1;  // if 3 hidden layers (0,1,2) last is 3-1 = [2]

        for (int j = 0; j < this.nHidden[hi]; ++j)          // last hidden -> output
        {
            for (int k = 0; k < this.nOutput; ++k)
            {
                hoWeights[j][k] = newWeights[ptr++];
            }
        }

        for (int h = 0; h < nLayers; ++h)  // hidden node biases
        {
            for (int j = 0; j < this.nHidden[h]; ++j)
            {
                hBiases[h][j] = newWeights[ptr++];
            }
        }

        for (int k = 0; k < nOutput; ++k)  // output node biases
        {
            oBiases[k] = newWeights[ptr++];
        }
    } // updateWeights
// Beispiel #5 — scraper artifact (example separator plus vote count "0"), not code
        /// <summary>
        /// Starts SGD training on a background thread, using either the
        /// previously loaded network (when its input/output layer sizes match
        /// the loaded data set) or a freshly built one from the UI spec.
        /// async void is acceptable here: this is a top-level event handler.
        /// </summary>
        private async void StartButton_Click(object sender, RoutedEventArgs e)
        {
            if (model.TrainingActive)
            {
                MessageBox.Show("Training is already active.");
                return;
            }
            if (dataContainer == null)
            {
                MessageBox.Show("No data loaded.");
                return;
            }

            // BUG FIX: parse hyper-parameters BEFORE setting TrainingActive.
            // The original set TrainingActive = true first and then returned
            // on a parse failure without resetting it, permanently locking
            // every later click into "Training is already active."
            if (!int.TryParse(model.Epochs, out int epochs) ||
                !int.TryParse(model.BatchSize, out int batchSize) ||
                !double.TryParse(model.LearningRate, out double learningRate))
            {
                return;
            }

            model.TrainingActive = true;

            // Prepare the network: reuse the loaded one only if it is
            // compatible with the data, otherwise build from the UI spec.
            if (model.UseLoadedNN && loadedNetwork != null &&
                loadedNetwork.GetLayerCounts()[0] == dataContainer.GetInputDataSize() && loadedNetwork.GetLayerCounts().Last() == dataContainer.GetOutputDataSize())
            {
                neuralNetwork = loadedNetwork;
            }
            else
            {
                // Hidden layer sizes come as a space-separated list, e.g. "30 10".
                string[]   tokens = model.HiddenLayers.Split(' ').Where(s => !string.IsNullOrWhiteSpace(s)).ToArray();
                List <int> hlList = new List <int>();
                foreach (string token in tokens)
                {
                    if (int.TryParse(token, out int hl))
                    {
                        hlList.Add(hl);
                    }
                }
                neuralNetwork = new DeepNeuralNetwork(dataContainer.GetInputDataSize(), dataContainer.GetOutputDataSize(), hlList.ToArray());
            }

            _epochs = epochs;

            // Log the run configuration, e.g. "784 30 10  - epochs = ...".
            string layerString = string.Join(" ", neuralNetwork.GetLayerCounts()) + " ";
            model.ConsoleString += $"{layerString} - epochs = {epochs}, batch size = {batchSize}, learning rate = {learningRate}\n\n\n";

            // Clear temp csv file (File.Delete is a no-op when it is missing).
            File.Delete(_tempCsvFileName);

            // Train off the UI thread so the window stays responsive.
            await Task.Run(() =>
                           Trainer.Sgd(neuralNetwork, dataContainer.TrainingSetAsLabeledDataArray(), batchSize, epochs, learningRate,
                                       UpdateTrainingStatus, dataContainer.TestingSetAsLabeledDataArray(), dataContainer.CheckIfOutputIsCorrect));

            MessageBox.Show("Training done.");
            model.TrainingActive = false;
        }