Example #1
0
 /// <summary>
 /// Dumps the network to the debug output: first every node value layer by
 /// layer (inputs, hidden layers, outputs), then every connection weight.
 /// </summary>
 /// <param name="nn">Network to print; read-only, never modified.</param>
 public static void PrintNetwork(NN nn)
 {
     Debug.Write("Inputs: ");
     for (int l = 0; l < nn.nodes.Length; l++)
     {
         // Layer 0 already got the "Inputs: " label above; label the rest.
         if (l > 0 && l < nn.nodes.Length - 1)
         {
             Debug.Write("Hidden Layer " + l + " Values: ");
         }
         if (l == nn.nodes.Length - 1)
         {
             Debug.Write("Outputs: ");
         }
         for (int n = 0; n < nn.nodes[l].Length; n++)
         {
             Debug.Write(nn.nodes[l][n] + ", ");
         }
         Debug.Write(Environment.NewLine);
     }
     // Weights connect layer `layer` to layer `layer + 1`, hence Length - 1.
     for (int layer = 0; layer < nn.nodes.Length - 1; layer++)
     {
         for (int node = 0; node < nn.nodes[layer].Length; node++)
         {
             for (int weight = 0; weight < nn.nodes[layer + 1].Length; weight++)
             {
                 // FIX: the original omitted the weight index, so the output
                 // read "... weight <value>" with no way to tell which of the
                 // fan-out weights was being shown.
                 Debug.WriteLine("Weights for layer " + layer + " node " + node + " weight " + weight + ": " + nn.weights[layer][node][weight]);
             }
         }
     }
 }
Example #2
0
        /// <summary>
        /// Creates a deep copy of a network: same topology, activation
        /// functions, node values, biases, and weights as the original.
        /// </summary>
        /// <param name="original">Network to clone; never modified.</param>
        /// <returns>An independent copy of <paramref name="original"/>.</returns>
        public static NN CopyNN(NN original)
        {
            int layerCount = original.nodes.Length;

            // Record each layer's node count so the constructor can rebuild
            // an identical topology.
            int[] layerSizes = new int[layerCount];
            for (int i = 0; i < layerCount; i++)
            {
                layerSizes[i] = original.nodes[i].Length;
            }

            NN clone = new NN(layerSizes, original.hiddenLayerFunctions, original.outputLayerFunction);

            // Copy per-node state; only layers before the last have outgoing
            // weights, so guard the innermost copy accordingly.
            for (int i = 0; i < layerCount; i++)
            {
                bool hasOutgoingWeights = i < layerCount - 1;

                for (int j = 0; j < original.nodes[i].Length; j++)
                {
                    clone.nodes[i][j] = original.nodes[i][j];
                    clone.biases[i][j] = original.biases[i][j];

                    if (hasOutgoingWeights)
                    {
                        for (int k = 0; k < original.nodes[i + 1].Length; k++)
                        {
                            clone.weights[i][j][k] = original.weights[i][j][k];
                        }
                    }
                }
            }

            return clone;
        }
Example #3
0
        // Button handler: evolves a population of networks with a genetic
        // algorithm to fit generated training data, printing progress to the
        // debug output after every 10 generations.
        // NOTE(review): while(true) never exits and runs on the UI thread, so
        // the form freezes once this button is clicked — consider a background
        // worker or a stop condition.
        private void button4_Click(object sender, EventArgs e)
        {

       
            Training session = new Training();
            Random rnd = new Random();
            session.trainingData = Training.GenerateTrainingData(rnd);
            // 100 units, quadratic loss, 1 input / 1 output, sigmoid hidden and
            // output activations, 1-3 hidden layers of 2-10 nodes, weights and
            // biases in [-1, 1], 5 elites, mutation factor in [0.9, 1.1].
            GA ga = new GA(100, session.QuadraticLoss, 1, 1, NN.Activation.Sigmoid, NN.Activation.Sigmoid, 1, 3, 2, 10, -1, 1, -1, 1, 5, 0.9, 1.1);
            Debug.WriteLine("Initialized");
            while (true)
            {
                // Run 10 GA generations between progress reports.
                for (int i = 0; i < 10; i++)
                {
                    bool success = ga.Iterate();
                }
                // Report the first unit's cost and its prediction on the first
                // training sample (presumably population[0] is the fittest
                // after Iterate() — confirm against GA's sorting).
                Debug.Write("cost: " + ga.population[0].cost);
                Debug.Write(" y: " + session.trainingData[0][1][0]);
                ga.population[0].nodes[0][0] = (float)session.trainingData[0][0][0];
                ga.population[0].Iterate();
                Debug.Write(" a: " + ga.population[0].nodes[ga.population[0].nodes.Length - 1][0]);
                Debug.Write(Environment.NewLine);
                // Spot-check the same unit on a random x in [0, 1).
                double x = NN.NextDouble(rnd, 0, 1);
                Debug.Write("test: x: " + x);
                ga.population[0].nodes[0][0] = (float)x;
                ga.population[0].Iterate();
                Debug.Write(" a: " + ga.population[0].nodes[ga.population[0].nodes.Length - 1][0]);
                Debug.WriteLine(Environment.NewLine);
            }
        }
Example #4
0
 /// <summary>
 /// Mutates the network in place by scaling every weight with an independent
 /// random factor drawn uniformly from [minMutation, maxMutation].
 /// </summary>
 /// <param name="nn">Network whose weights are mutated.</param>
 /// <param name="rnd">Random source for the mutation factors.</param>
 /// <param name="minMutation">Smallest multiplicative factor.</param>
 /// <param name="maxMutation">Largest multiplicative factor.</param>
 public static void MutateWeights(NN nn, Random rnd, double minMutation, double maxMutation)
 {
     // Only layers before the last have outgoing weights.
     int lastLayer = nn.nodes.Length - 1;
     for (int layer = 0; layer < lastLayer; layer++)
     {
         int fanOut = nn.nodes[layer + 1].Length;
         for (int node = 0; node < nn.nodes[layer].Length; node++)
         {
             for (int outIndex = 0; outIndex < fanOut; outIndex++)
             {
                 nn.weights[layer][node][outIndex] *= MathTools.NextDouble(rnd, minMutation, maxMutation);
             }
         }
     }
 }
Example #5
0
 /// <summary>
 /// Overwrites every weight in the network with a fresh uniform random value
 /// in [minValue, maxValue].
 /// </summary>
 /// <param name="nn">Network whose weights are randomized in place.</param>
 /// <param name="rnd">Random source for the new values.</param>
 /// <param name="minValue">Lower bound of the random range.</param>
 /// <param name="maxValue">Upper bound of the random range.</param>
 /// <returns>Always true.</returns>
 public static bool RandomizeWeights(NN nn, Random rnd, double minValue, double maxValue)
 {
     // Weights sit between layer l and l + 1, so the last layer has none.
     for (int l = 0; l + 1 < nn.nodes.Length; l++)
     {
         int fanOut = nn.nodes[l + 1].Length;
         for (int n = 0; n < nn.nodes[l].Length; n++)
         {
             for (int w = 0; w < fanOut; w++)
             {
                 nn.weights[l][n][w] = MathTools.NextDouble(rnd, minValue, maxValue);
             }
         }
     }
     return true;
 }
Example #6
0
        /// <summary>
        /// Sum of squared errors of the network over the entire training set:
        /// for each sample, loads the inputs into layer 0, runs a forward
        /// pass, and accumulates (output - target)^2 over every output node.
        /// </summary>
        /// <param name="nn">Network to evaluate; its node values are
        /// overwritten by the forward passes.</param>
        /// <returns>Total (unaveraged) squared error.</returns>
        public double QuadraticLoss(NN nn)
        {
            double total = 0;

            // Input/output widths are taken from the first sample; all
            // samples share the same shape.
            int inputCount = trainingData[0][0].Length;
            int outputCount = trainingData[0][1].Length;
            int lastLayer = nn.nodes.Length - 1;

            for (int sample = 0; sample < trainingData.Length; sample++)
            {
                // Feed the sample's inputs into the input layer.
                for (int j = 0; j < inputCount; j++)
                {
                    nn.nodes[0][j] = trainingData[sample][0][j];
                }

                nn.Iterate();

                // Accumulate the squared error of every output node.
                for (int j = 0; j < outputCount; j++)
                {
                    total += Math.Pow(nn.nodes[lastLayer][j] - trainingData[sample][1][j], 2);
                }
            }

            return total;
            // return 1 / nn.nodes[0].Length * sum;
        }
Example #7
0
File: GA.cs Project: brando130/CSharp_NN
        // Builds a GA population of randomly-topologied networks: each unit
        // gets a random number of hidden layers (minHiddenLayers..maxHiddenLayers)
        // with a random node count per layer (minHiddenNodes..maxHiddenNodes),
        // plus randomized weights in [weightMinValue, weightMaxValue].
        // NOTE(review): `rnd` is not declared here — presumably a field of GA;
        // biasMinValue/biasMaxValue and minMutation/maxMutation are stored or
        // unused here and applied elsewhere — verify against the rest of GA.cs.
        public GA(int populationSize, Func <NN, double> costFunction, int inputCount, int outputCount, NN.ActivationFunction activationFunctionHiddenLayers, NN.ActivationFunction activationFunctionOutputLayer, int minHiddenLayers, int maxHiddenLayers, int minHiddenNodes, int maxHiddenNodes, double weightMinValue, double weightMaxValue, double biasMinValue, double biasMaxValue, int elitesToCopy, double minMutationPercentage, double maxMutationPercentage)
        {
            population  = new NN[populationSize];
            cost        = costFunction;
            elites      = elitesToCopy;
            minMutation = minMutationPercentage;
            maxMutation = maxMutationPercentage;

            // For each new unit to be created
            for (int i = 0; i < populationSize; i++)
            {
                // Specify input, hidden, and output layer node counts
                // (2 fixed layers + a random number of hidden layers;
                // rnd.Next's upper bound is exclusive, hence the + 1).
                int[] nodeCounts = new int[2 + rnd.Next(minHiddenLayers, maxHiddenLayers + 1)];

                nodeCounts[0] = inputCount;

                for (int j = 1; j < nodeCounts.Length - 1; j++)
                {
                    nodeCounts[j] = rnd.Next(minHiddenNodes, maxHiddenNodes + 1);
                }

                nodeCounts[nodeCounts.Length - 1] = outputCount;

                // Create the neural network
                // NOTE(review): `functions` has nodeCounts.Length - 1 slots but
                // the loop below fills only the first nodeCounts.Length - 2
                // (one per hidden layer); the last slot stays null/default.
                // Confirm whether the NN constructor reads that slot or uses
                // activationFunctionOutputLayer instead — possible off-by-one.
                NN.ActivationFunction[] functions = new NN.ActivationFunction[nodeCounts.Length - 1];

                for (int j = 0; j < nodeCounts.Length - 2; j++)
                {
                    functions[j] = activationFunctionHiddenLayers;
                }

                population[i] = new NN(nodeCounts, functions, activationFunctionOutputLayer);

                // Initialize weights
                bool success = NNHelper.RandomizeWeights(population[i], rnd, weightMinValue, weightMaxValue);
            }
        }
Example #8
0
 /// <summary>
 /// Cost function: forward-propagates the network and returns the cosine of
 /// its first output node.
 /// </summary>
 /// <param name="nn">Network to evaluate; node values are overwritten.</param>
 /// <returns>cos of the first node in the last layer.</returns>
 public static double CostOfCosineFunction(NN nn)
 {
     nn.Iterate();
     int outputLayer = nn.nodes.Length - 1;
     double firstOutput = nn.nodes[outputLayer][0];
     return Math.Cos(firstOutput);
 }
Example #9
0
 /// <summary>
 /// Cost function: forward-propagates the network, then returns the first
 /// node of the last layer squared.
 /// </summary>
 /// <param name="nn">Network to evaluate; node values are overwritten.</param>
 /// <returns>Square of the first output node.</returns>
 public static double CostOfSquaredFunction(NN nn)
 {
     nn.Iterate();
     int outputLayer = nn.nodes.Length - 1;
     double firstOutput = nn.nodes[outputLayer][0];
     return Math.Pow(firstOutput, 2);
 }