Example #1
        /// <summary>
        /// Constructs two new networks using a fixed mixing ratio between two parents.
        /// </summary>
        /// <param name="parent1">The fist parent network.</param>
        /// <param name="parent2">The second parent network.</param>
        /// <param name="mixingRatio">The probability with which the first(second) child will have genes from the first(second) parent.</param>
        /// <param name="ch1">When this method returns contains the first child network.</param>
        /// <param name="ch2">When this method returns contains the second child network.</param>
        private void UniformCrossover(NeuralNetwork parent1, NeuralNetwork parent2, float mixingRatio, out NeuralNetwork ch1, out NeuralNetwork ch2)
        {
            DenseLayer[] parent1Layers = parent1.GetLayers();
            DenseLayer[] parent2Layers = parent2.GetLayers();

            var child1Layers = new DenseLayer[parent1Layers.Length];
            var child2Layers = new DenseLayer[parent1Layers.Length];

            for (int k = 0; k < child1Layers.Length; k++)
            {
                DenseLayer parent1Layer = parent1Layers[k];
                DenseLayer parent2Layer = parent2Layers[k];

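                // The extra input row (+1) presumably carries the layer's bias weights,
                // so biases are crossed over together with the ordinary connection weights.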
                var child1Weights = new double[parent1Layer.InputNeuronsCount + 1, parent1Layer.OutputNeuronsCount];
                var child2Weights = new double[parent1Layer.InputNeuronsCount + 1, parent1Layer.OutputNeuronsCount];

                for (int i = 0; i < child1Weights.GetLength(0); i++)
                {
                    for (int j = 0; j < child1Weights.GetLength(1); j++)
                    {
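                        // With probability mixingRatio this weight goes to child 1 from parent 1
                        // (and to child 2 from parent 2); otherwise the parents are swapped.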
                        if (random.NextDouble() < mixingRatio)
                        {
                            child1Weights[i, j] = parent1Layer[i, j];
                            child2Weights[i, j] = parent2Layer[i, j];
                        }
                        else
                        {
                            child1Weights[i, j] = parent2Layer[i, j];
                            child2Weights[i, j] = parent1Layer[i, j];
                        }
                    }
                }

                child1Layers[k] = new DenseLayer(child1Weights, parent1Layer.GetActivationFunction());
                child2Layers[k] = new DenseLayer(child2Weights, parent1Layer.GetActivationFunction());
            }

            ch1 = new NeuralNetwork(child1Layers);
            ch2 = new NeuralNetwork(child2Layers);
        }
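
A minimal sketch of how this crossover step might be driven from a genetic-algorithm breeding loop, assuming it lives in the same class. The SelectParent helper, the collection types, and the 0.5 mixing ratio are assumptions for illustration and are not part of the original code:

        /// <summary>
        /// Hypothetical breeding step: picks two parents and adds their two offspring
        /// (produced by uniform crossover) to the next generation.
        /// </summary>
        private void BreedPair(List<NeuralNetwork> population, List<NeuralNetwork> nextGeneration)
        {
            // SelectParent is assumed to implement some fitness-based selection
            // (e.g. tournament or roulette wheel); it is not shown in this example.
            NeuralNetwork parent1 = SelectParent(population);
            NeuralNetwork parent2 = SelectParent(population);

            // A mixing ratio of 0.5 gives an unbiased uniform crossover:
            // each weight is equally likely to come from either parent.
            UniformCrossover(parent1, parent2, 0.5f, out NeuralNetwork child1, out NeuralNetwork child2);

            nextGeneration.Add(child1);
            nextGeneration.Add(child2);
        }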