Example No. 1
    public void CompareWeightsReturnsExpectedResultOfDifferent()
    {
        // Arrange
        const int nInputs  = 9;
        const int nOutputs = 1;

        int[] hiddenLayers = new int[] { 9 };
        Dna   dna1         = Dna.GenerateRandomDnaEncoding(nInputs, hiddenLayers, nOutputs, ActivationType.LeakyRelu, true);

        dna1.WeightsAndBiases.Should().HaveCount(100);
        dna1.WeightsAndBiases.Clear();
        dna1.WeightsAndBiases.AddRange(Enumerable.Repeat(1.00, 100));
        Dna dna2 = Dna.Clone(dna1);

        dna1.WeightsAndBiases.Should().Equal(dna2.WeightsAndBiases);
        dna2.WeightsAndBiases[2] = -49.00; // aggregate absolute weight difference of dna2 vs dna1 should now be 50
        dna1.WeightsAndBiases.Should().NotEqual(dna2.WeightsAndBiases);

        // Act
        var comparisonResult = DnaUtils.CompareWeights(dna1, dna2);

        // Assert
        float  percentWeightDiff      = comparisonResult.Item1;
        double percentWeightValueDiff = comparisonResult.Item2;

        percentWeightDiff.Should().Be(0.01f);    // i.e. 1% of weights differ in value
        percentWeightValueDiff.Should().Be(0.5); // i.e. total weight values of dna2 differ by 50% (NOT 50% bigger)
    }
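
The assertions above pin down the contract of DnaUtils.CompareWeights: the first tuple element is the fraction of genes whose values differ, and the second is the sum of absolute differences divided by the total absolute weight of the first genome. Below is a minimal sketch of that calculation over two plain weight lists; the helper name and signature are assumptions, since the real method takes Dna instances.

using System;
using System.Collections.Generic;
using System.Linq;

public static class WeightComparisonSketch
{
    // Hypothetical helper mirroring the behaviour asserted for DnaUtils.CompareWeights:
    // Item1 = fraction of gene positions whose values differ,
    // Item2 = total absolute difference as a fraction of the first genome's total absolute weight.
    public static (float, double) Compare(IReadOnlyList<double> a, IReadOnlyList<double> b)
    {
        if (a.Count != b.Count) throw new ArgumentException("Genomes must have the same length.");

        int    differingCount = a.Where((value, i) => value != b[i]).Count();
        double totalAbsDiff   = a.Select((value, i) => Math.Abs(value - b[i])).Sum();
        double totalAbsValue  = a.Sum(value => Math.Abs(value));

        // With the data above: 1 of 100 genes differ (0.01f) and |1 - (-49)| / 100 = 0.5.
        return ((float)differingCount / a.Count, totalAbsDiff / totalAbsValue);
    }
}
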
Example No. 2
    public void NeuralNetworkFeedForwardTests()
    {
        // Arrange
        const int nInputs  = 3;
        const int nOutputs = 3;
        const int outputLayerActivationIndex = 0;

        int[] hiddenLayersNeuronCount = new int[] { 2, 9, 4 };
        Dna   dna = Dna.GenerateRandomDnaEncoding(nInputs, hiddenLayersNeuronCount, nOutputs, (ActivationType)outputLayerActivationIndex, true);
        Func <List <double>, ActivationType, INeuron> fakeNeuronFactory = (List <double> a, ActivationType b) => new FakeNeuron(a, b);

        // Act
        NeuralNetwork neuralNetwork = new NeuralNetwork(dna, fakeNeuronFactory);
        List <double> networkInput  = new List <double>()
        {
            1, 2, 3
        };
        List <double> networkOutput = neuralNetwork.Think(networkInput);

        // Assert
        IEnumerable <FakeNeuron> allNeurons = neuralNetwork.Layers.SelectMany(n => n).Cast <FakeNeuron>(); // SelectMany = flatmap
        int expectedNumberOfNeurons         = hiddenLayersNeuronCount[0] + hiddenLayersNeuronCount[1] + hiddenLayersNeuronCount[2] + nOutputs;

        allNeurons.Should().HaveCount(expectedNumberOfNeurons);
        allNeurons.Should().HaveCount(FakeNeuron.Instances.Count, "count of fake neurons did not match number of created neurons");

        foreach (FakeNeuron n in neuralNetwork.Layers[0].Cast <FakeNeuron>())
        {
            n.RecievedInputs.Should().HaveCount(1, "neurons were not fired the expected number of times");
            n.RecievedInputs[0].Should().Equal(networkInput);
            n.Outputs.Should().HaveCount(1);
        }

        foreach (FakeNeuron n in neuralNetwork.Layers[1].Cast <FakeNeuron>())
        {
            n.RecievedInputs.Should().HaveCount(1, "neurons were not fired the expected number of times");
            n.RecievedInputs[0].Should().Equal(Enumerable.Range(0, hiddenLayersNeuronCount[0]), "layer did not receive expected input from previous layer");
            n.Outputs.Should().HaveCount(1);
        }

        foreach (FakeNeuron n in neuralNetwork.Layers[2].Cast <FakeNeuron>())
        {
            n.RecievedInputs.Should().HaveCount(1, "neurons were not fired the expected number of times");
            n.RecievedInputs[0].Should().Equal(Enumerable.Range(hiddenLayersNeuronCount[0], hiddenLayersNeuronCount[1]), "layer did not receive expected input from previous layer");
            n.Outputs.Should().HaveCount(1);
        }

        foreach (FakeNeuron n in neuralNetwork.Layers[3].Cast <FakeNeuron>())
        {
            n.RecievedInputs.Should().HaveCount(1, "neurons were not fired the expected number of times");
            n.RecievedInputs[0].Should().Equal(Enumerable.Range(hiddenLayersNeuronCount[0] + hiddenLayersNeuronCount[1], hiddenLayersNeuronCount[2]), "layer did not receive expected input from previous layer");
            n.Outputs.Should().HaveCount(1);
        }

        networkOutput.Should().Equal(Enumerable.Range(hiddenLayersNeuronCount[0] + hiddenLayersNeuronCount[1] + hiddenLayersNeuronCount[2], nOutputs), "network did not produce the expected outputs");
    }
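
The layer-by-layer checks above only hold if each FakeNeuron records every input it receives and always outputs its own creation index, so the next layer's input becomes an Enumerable.Range over the previous layer's neuron indices. A sketch of a test double with that behaviour follows; it assumes the INeuron interface and ActivationType enum from the project under test, and any member not referenced in the test (including the firing method name) is a guess.

using System.Collections.Generic;

// Sketch of the FakeNeuron test double implied by the assertions above. Each instance
// records its inputs and outputs its creation index, making every layer's output a
// predictable Enumerable.Range sequence.
public class FakeNeuron // : INeuron in the real test suite
{
    public static readonly List<FakeNeuron> Instances = new List<FakeNeuron>();

    private readonly int creationIndex;

    public List<double>       Weights        { get; }
    public ActivationType     Activation     { get; }
    public List<List<double>> RecievedInputs { get; } = new List<List<double>>(); // spelling matches the test
    public List<double>       Outputs        { get; } = new List<double>();

    public FakeNeuron(List<double> weights, ActivationType activation)
    {
        Weights       = weights;
        Activation    = activation;
        creationIndex = Instances.Count;
        Instances.Add(this);
    }

    public double Fire(List<double> inputs) // method name is an assumption
    {
        RecievedInputs.Add(inputs);
        Outputs.Add(creationIndex);
        return creationIndex;
    }
}
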
Example No. 3
    public void NeuralNetworkStructureTests()
    {
        // Arrange
        const int nInputs  = 5;
        const int nOutputs = 3;
        const int outputLayerActivationIndex = 0;

        int[] hiddenLayersNeuronCount = new int[] { 4, 3, 6 };
        Dna   dna = Dna.GenerateRandomDnaEncoding(nInputs, hiddenLayersNeuronCount, nOutputs, (ActivationType)outputLayerActivationIndex, true);

        // Act
        NeuralNetwork neuralNetwork = new NeuralNetwork(dna);

        // Assert
        // Correct number of layers and neurons per layer
        neuralNetwork.Layers.Should().HaveCount(hiddenLayersNeuronCount.Length + 1);
        neuralNetwork.Layers[0].Should().HaveCount(hiddenLayersNeuronCount[0]);
        neuralNetwork.Layers[1].Should().HaveCount(hiddenLayersNeuronCount[1]);
        neuralNetwork.Layers[2].Should().HaveCount(hiddenLayersNeuronCount[2]);
        neuralNetwork.Layers[3].Should().HaveCount(nOutputs);

        // Neurons and weight counts
        IEnumerable <INeuron> allNeurons = neuralNetwork.Layers.SelectMany(n => n); // SelectMany = flatmap
        int expectedNumberOfNeurons      = hiddenLayersNeuronCount[0] + hiddenLayersNeuronCount[1] + hiddenLayersNeuronCount[2] + nOutputs;

        allNeurons.Should().HaveCount(expectedNumberOfNeurons);
        allNeurons.Should().OnlyHaveUniqueItems();
        neuralNetwork.Layers[0].Should().OnlyContain(n => n.Weights.Count == nInputs, "neurons in hidden layer 0 had incorrect number of weight inputs");
        neuralNetwork.Layers[1].Should().OnlyContain(n => n.Weights.Count == hiddenLayersNeuronCount[0], "neurons in hidden layer 1 had incorrect number of weight inputs");
        neuralNetwork.Layers[2].Should().OnlyContain(n => n.Weights.Count == hiddenLayersNeuronCount[1], "neurons in hidden layer 2 had incorrect number of weight inputs");
        neuralNetwork.Layers[3].Should().OnlyContain(n => n.Weights.Count == hiddenLayersNeuronCount[2], "neurons in output layer had incorrect number of weight inputs");
        allNeurons.Should().OnlyContain(n => n.Bias <= 1 && n.Bias >= -1, "all neurons did not have Bias values between -1 and 1");

        // Neuron weight values
        List <double> reconstructedWeightsAndBiases = new List <double>();

        foreach (var layer in neuralNetwork.Layers)
        {
            foreach (var neuron in layer)
            {
                dna.WeightsAndBiases.Should().ContainInOrder(neuron.Weights);
                reconstructedWeightsAndBiases.Add(neuron.Bias);
                reconstructedWeightsAndBiases.AddRange(neuron.Weights);
            }
        }
        reconstructedWeightsAndBiases.Should().Equal(dna.WeightsAndBiases);

        // Neuron activation functions
        var activationFunctions         = allNeurons.Select(n => n.ActivationFunction);
        var expectedActivationFunctions = dna.ActivationIndexes.Select(index => Activation.Functions[(ActivationType)index]);

        activationFunctions.Should().Equal(expectedActivationFunctions);
    }
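
The reconstruction loop above implies the genome layout used by NeuralNetwork: per layer, per neuron, one bias followed by one weight per input from the previous layer. A sketch of slicing a flat genome into per-neuron (bias, weights) segments under that assumption (helper name hypothetical):

using System;
using System.Collections.Generic;
using System.Linq;

public static class GenomeLayoutSketch
{
    // Assumed gene ordering, inferred from the reconstruction loop in the test above:
    // for each layer, for each neuron, one bias followed by one weight per input.
    public static List<(double Bias, double[] Weights)> Slice(
        IReadOnlyList<double> genes, IReadOnlyList<int> outputsPerLayer)
    {
        var neurons = new List<(double Bias, double[] Weights)>();
        int cursor  = 0;

        // Layer i receives outputsPerLayer[i - 1] inputs and contains outputsPerLayer[i] neurons.
        for (int layer = 1; layer < outputsPerLayer.Count; layer++)
        {
            int inputsToLayer = outputsPerLayer[layer - 1];
            for (int n = 0; n < outputsPerLayer[layer]; n++)
            {
                double   bias    = genes[cursor++];
                double[] weights = genes.Skip(cursor).Take(inputsToLayer).ToArray();
                cursor += inputsToLayer;
                neurons.Add((bias, weights));
            }
        }

        if (cursor != genes.Count)
            throw new ArgumentException("Genome length does not match the declared topology.");
        return neurons;
    }
}
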
Example No. 4
    public void GeneratesRandomDnaWithHomogeneousActivation()
    {
        // Arrange
        const int nInputs  = 50;
        const int nOutputs = 8;
        const int neuronActivationIndex = 3;

        int[] hiddenLayers = new int[] { 30, 50, 4, 12 };

        int expectedNumberOfNeurons          = hiddenLayers[0] + hiddenLayers[1] + hiddenLayers[2] + hiddenLayers[3] + nOutputs;
        int expectedNumberOfWeightsAndBiases = expectedNumberOfNeurons + // i.e. one bias weight per neuron
                                               nInputs * hiddenLayers[0] +
                                               hiddenLayers[0] * hiddenLayers[1] +
                                               hiddenLayers[1] * hiddenLayers[2] +
                                               hiddenLayers[2] * hiddenLayers[3] +
                                               hiddenLayers[3] * nOutputs;

        int[] expectedOutputsPerLayer = new int[]
        {
            nInputs,
            hiddenLayers[0],
            hiddenLayers[1],
            hiddenLayers[2],
            hiddenLayers[3],
            nOutputs,
        };

        // Act
        Dna randomlyGeneratedDna = Dna.GenerateRandomDnaEncoding(nInputs, hiddenLayers, nOutputs, (ActivationType)neuronActivationIndex, false);

        // Assert
        randomlyGeneratedDna.OutputsPerLayer.Should().BeEquivalentTo(expectedOutputsPerLayer);
        randomlyGeneratedDna.WeightsAndBiases.Should().HaveCount(expectedNumberOfWeightsAndBiases);
        randomlyGeneratedDna.ActivationIndexes.Should().HaveCount(expectedNumberOfNeurons);
        randomlyGeneratedDna.ActivationIndexes.Should().OnlyContain(index => index == neuronActivationIndex, "dna was not initialised with uniform hidden layer activation functions");
        randomlyGeneratedDna.Heritage.Should().Be(DnaHeritage.New);

        foreach (var weightValue in randomlyGeneratedDna.WeightsAndBiases)
        {
            weightValue.Should().BeOfType(typeof(double)).And.BeInRange(-1, 1, "weight was not initialised between -1 and 1");
        }

        foreach (var activationIndex in randomlyGeneratedDna.ActivationIndexes)
        {
            activationIndex.Should().BeOfType(typeof(int)).And.BeInRange(0, Activation.FunctionsCount, "invalid activation index generated");
        }
    }
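
The hand-expanded arithmetic above generalises to: one bias per non-input neuron plus fan-in times fan-out weights for each consecutive pair of layers. A small illustrative helper (hypothetical name) that computes the same totals:

using System.Linq;

public static class TopologyMathSketch
{
    // Total parameter count for a fully connected feed-forward network:
    // one bias per non-input neuron plus (fan-in x fan-out) weights per layer pair.
    // For the topology above (50 inputs, hidden {30, 50, 4, 12}, 8 outputs) this yields
    // 104 biases + 3344 weights = 3448.
    public static int ExpectedWeightsAndBiases(int nInputs, int[] hiddenLayers, int nOutputs)
    {
        int[] layerSizes = new[] { nInputs }
                           .Concat(hiddenLayers)
                           .Concat(new[] { nOutputs })
                           .ToArray();

        int biases  = layerSizes.Skip(1).Sum();
        int weights = Enumerable.Range(0, layerSizes.Length - 1)
                                .Sum(i => layerSizes[i] * layerSizes[i + 1]);
        return biases + weights;
    }
}
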
Example No. 5
    public void SelectsRandomDnaWeightedByFitnessExcluding()
    {
        // Arrange
        float      totalFitness = 0f;
        List <Dna> parentPool   = Enumerable.Range(0, 5)
                                  .Select((_) => Dna.GenerateRandomDnaEncoding(1, new int[] { 1 }, 1, ActivationType.BinaryStep, false))
                                  .Select((dna, index) =>
        {
            dna.RawFitnessRating = Mathf.Pow(index, 2);
            totalFitness        += dna.RawFitnessRating;
            return(dna);
        })
                                  .ToList();

        Dna excludedDna = parentPool[3]; // second fittest dna

        totalFitness -= excludedDna.RawFitnessRating;
        // List<Dna> remainingCandidatePool = parentPool.FindAll(dna => dna != excludedDna);

        Dictionary <Dna, float> expectedProportions = parentPool.ToDictionary(
            dna => dna,
            dna => dna == excludedDna ? 0 : dna.RawFitnessRating / totalFitness
            );


        // Act
        int sampleSize = 100000;
        Dictionary <Dna, int> selectionResults = parentPool.ToDictionary(dna => dna, dna => 0);

        for (int i = 0; i < sampleSize; i++)
        {
            selectionResults[Darwin.SelectRandomBasedOnFitness(parentPool, excludedDna)]++;
        }


        // Assert
        Dictionary <Dna, float> actualProportions = selectionResults.ToDictionary(
            kvp => kvp.Key,
            kvp => (float)kvp.Value / (float)sampleSize
            );

        selectionResults[excludedDna].Should().Be(0, "the excluded dna should never be selected");
        foreach (Dna dna in parentPool)
        {
            actualProportions[dna].Should().BeApproximately(expectedProportions[dna], 0.01f, "each selection proportion should be within 1% of its expected value");
        }
    }
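
The expected proportions above assume classic fitness-proportionate ("roulette wheel") selection: each candidate is chosen with probability fitness / total fitness, and the excluded genome is never chosen. A minimal sketch over plain (item, fitness) pairs; the real Darwin.SelectRandomBasedOnFitness works on Dna, so this signature is an assumption.

using System;
using System.Collections.Generic;
using System.Linq;

public static class RouletteSelectionSketch
{
    private static readonly Random Rng = new Random();

    // Fitness-proportionate ("roulette wheel") selection with an optional excluded candidate,
    // matching the probabilities the test above estimates from 100,000 samples.
    public static T Select<T>(IReadOnlyList<(T Item, float Fitness)> pool, T excluded = default(T))
        where T : class
    {
        var candidates      = pool.Where(p => !ReferenceEquals(p.Item, excluded)).ToList();
        double totalFitness = candidates.Sum(p => (double)p.Fitness);

        double spin       = Rng.NextDouble() * totalFitness; // a point on the wheel, in [0, totalFitness)
        double cumulative = 0;

        foreach (var (item, fitness) in candidates)
        {
            cumulative += fitness;
            if (spin < cumulative) return item;
        }
        return candidates[candidates.Count - 1].Item; // floating-point safety net
    }
}
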
Example No. 6
    public void GeneratesRandomDnaWithHeterogeneousActivation()
    {
        // Arrange
        const int nInputs  = 5;
        const int nOutputs = 3;
        const int outputLayerActivationIndex = 0;

        int[] hiddenLayers = new int[] { 4, 3, 6 };

        int expectedNumberOfNeurons          = hiddenLayers[0] + hiddenLayers[1] + hiddenLayers[2] + nOutputs;
        int expectedNumberOfWeightsAndBiases = expectedNumberOfNeurons + // i.e. one bias weight per neuron
                                               nInputs * hiddenLayers[0] +
                                               hiddenLayers[0] * hiddenLayers[1] +
                                               hiddenLayers[1] * hiddenLayers[2] +
                                               hiddenLayers[2] * nOutputs;

        int[] expectedOutputsPerLayer = new int[]
        {
            nInputs,
            hiddenLayers[0],
            hiddenLayers[1],
            hiddenLayers[2],
            nOutputs,
        };

        // Act
        Dna randomlyGeneratedDna = Dna.GenerateRandomDnaEncoding(nInputs, hiddenLayers, nOutputs, (ActivationType)outputLayerActivationIndex, true);

        // Assert
        randomlyGeneratedDna.OutputsPerLayer.Should().Equal(expectedOutputsPerLayer);
        randomlyGeneratedDna.WeightsAndBiases.Should().HaveCount(expectedNumberOfWeightsAndBiases);
        randomlyGeneratedDna.ActivationIndexes.Should().HaveCount(expectedNumberOfNeurons);
        randomlyGeneratedDna.ActivationIndexes.Should().Contain(index => index > outputLayerActivationIndex, "dna was not initialised with random hidden layer activation functions");
        randomlyGeneratedDna.ActivationIndexes.Skip(expectedNumberOfNeurons - nOutputs).Should().OnlyContain(index => index == outputLayerActivationIndex, "output layer activation functions were not of the expected kind");

        foreach (var weightValue in randomlyGeneratedDna.WeightsAndBiases)
        {
            weightValue.Should().BeOfType(typeof(double)).And.BeInRange(-1, 1, "weight was not initialised between -1 and 1");
        }

        foreach (var activationIndex in randomlyGeneratedDna.ActivationIndexes)
        {
            activationIndex.Should().BeOfType(typeof(int)).And.BeInRange(0, Activation.FunctionsCount, "invalid activation index generated");
        }
    }
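
The activation assertions above imply how the index list is laid out: hidden-layer neurons draw a random activation index, while every output-layer neuron keeps the requested one and sits at the tail of the list (hence the Skip-based check). A sketch of that layout; activationFunctionCount stands in for Activation.FunctionsCount.

using System;
using System.Collections.Generic;
using System.Linq;

public static class ActivationLayoutSketch
{
    private static readonly Random Rng = new Random();

    // Hidden-layer neurons get a random activation index; output-layer neurons all use the
    // requested index and occupy the end of the list, which is why the test can isolate them
    // with Skip(expectedNumberOfNeurons - nOutputs).
    public static List<int> Build(int[] hiddenLayers, int nOutputs,
                                  int outputLayerActivationIndex, int activationFunctionCount)
    {
        var indexes = new List<int>();

        foreach (int layerSize in hiddenLayers)
        {
            indexes.AddRange(Enumerable.Range(0, layerSize)
                                       .Select(_ => Rng.Next(0, activationFunctionCount)));
        }

        indexes.AddRange(Enumerable.Repeat(outputLayerActivationIndex, nOutputs));
        return indexes;
    }
}
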
Example No. 7
    public void PerformsDnaMutation()
    {
        // Arrange
        const int nInputs  = 2;
        const int nOutputs = 2;
        const int outputLayerActivationIndex = 3;

        int[] hiddenLayers             = new int[] { 7 };
        Dna   originalDna              = Dna.GenerateRandomDnaEncoding(nInputs, hiddenLayers, nOutputs, (ActivationType)outputLayerActivationIndex, true);
        float weightMutationPrevalence = 0.2f;

        // Act
        Dna mutatedDna = Dna.CloneAndMutate(originalDna, DnaHeritage.MutatedElite, weightMutationPrevalence, 0.8f);

        // Assert
        CheckDnaIsNotReferentiallyEqual(originalDna, mutatedDna);
        // Structure
        mutatedDna.OutputsPerLayer.Should().Equal(originalDna.OutputsPerLayer);
        mutatedDna.Heritage.Should().Be(DnaHeritage.MutatedElite);

        // Weights
        mutatedDna.WeightsAndBiases.Should().NotEqual(originalDna.WeightsAndBiases);
        mutatedDna.WeightsAndBiases.Should().HaveCount(originalDna.WeightsAndBiases.Count);
        List <double> mutatedWeights = new List <double>();

        for (int i = 0; i < originalDna.WeightsAndBiases.Count; i++)
        {
            if (originalDna.WeightsAndBiases[i] != mutatedDna.WeightsAndBiases[i])
            {
                mutatedWeights.Add(mutatedDna.WeightsAndBiases[i]);
            }
        }
        mutatedWeights.Should().HaveCount(Mathf.CeilToInt(weightMutationPrevalence * originalDna.WeightsAndBiases.Count));

        // Activation
        mutatedDna.ActivationIndexes.Should().NotEqual(originalDna.ActivationIndexes);
        mutatedDna.ActivationIndexes.Should().HaveCount(originalDna.ActivationIndexes.Count);
        int indexOfOutputLayerActivation = originalDna.ActivationIndexes.Count - nOutputs;

        mutatedDna.ActivationIndexes.Skip(indexOfOutputLayerActivation)
        .Should().Equal(originalDna.ActivationIndexes.Skip(indexOfOutputLayerActivation), "preserves output layer activation functions");
    }
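
The mutatedWeights count assertion only holds if mutation touches exactly Ceiling(prevalence * genome length) distinct genes while leaving the output-layer activation genes alone. A sketch of the weight half of that behaviour; the perturbation used here (a fresh random value in [-1, 1]) is an assumption about what Dna.CloneAndMutate actually does.

using System;
using System.Collections.Generic;
using System.Linq;

public static class MutationSketch
{
    private static readonly Random Rng = new Random();

    // Mutates exactly Ceiling(prevalence * genome length) distinct gene positions, which is
    // what the mutatedWeights.Should().HaveCount(...) assertion above depends on.
    public static List<double> MutateWeights(IReadOnlyList<double> weights, float prevalence)
    {
        int mutationCount = (int)Math.Ceiling(prevalence * weights.Count);

        // Shuffle the gene positions and take the first mutationCount of them.
        var targets = new HashSet<int>(Enumerable.Range(0, weights.Count)
                                                 .OrderBy(_ => Rng.Next())
                                                 .Take(mutationCount));

        return weights.Select((value, i) => targets.Contains(i)
                                                ? Rng.NextDouble() * 2 - 1 // assumed perturbation
                                                : value)
                      .ToList();
    }
}
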
Example No. 8
    public void PerformsDnaCrossover()
    {
        // Arrange
        const int nInputs  = 7;
        const int nOutputs = 4;
        const int outputLayerActivationIndex = 3;

        int[] hiddenLayers = new int[] { 7, 1, 4, 7 };
        Dna   parent1Dna   = Dna.GenerateRandomDnaEncoding(nInputs, hiddenLayers, nOutputs, (ActivationType)outputLayerActivationIndex, true);
        Dna   parent2Dna   = Dna.GenerateRandomDnaEncoding(nInputs, hiddenLayers, nOutputs, (ActivationType)outputLayerActivationIndex, true);

        // Act
        List <Dna> offspring = Dna.CreateOffspring(parent1Dna, parent2Dna, 5, true);

        // Assert
        offspring.Should().HaveCount(2);
        CheckDnaIsNotReferentiallyEqual(offspring[0], offspring[1]);

        foreach (var child in offspring)
        {
            CheckDnaIsNotReferentiallyEqual(child, parent1Dna);
            CheckDnaIsNotReferentiallyEqual(child, parent2Dna);
            child.OutputsPerLayer.Should().Equal(parent1Dna.OutputsPerLayer);
            child.WeightsAndBiases.Should().HaveCount(parent1Dna.WeightsAndBiases.Count);
            child.WeightsAndBiases.Should().NotBeEquivalentTo(parent1Dna.WeightsAndBiases);
            child.WeightsAndBiases.Should().NotBeEquivalentTo(parent2Dna.WeightsAndBiases);
            child.ActivationIndexes.Should().HaveCount(parent1Dna.ActivationIndexes.Count);
            child.ActivationIndexes.Should().NotBeEquivalentTo(parent1Dna.ActivationIndexes);
            child.ActivationIndexes.Should().NotBeEquivalentTo(parent2Dna.ActivationIndexes);
            child.Heritage.Should().Be(DnaHeritage.Offspring);
        }

        double offspringTotalWeights = offspring[0].WeightsAndBiases.Sum() + offspring[1].WeightsAndBiases.Sum();
        double parentsTotalWeights   = parent1Dna.WeightsAndBiases.Sum() + parent2Dna.WeightsAndBiases.Sum();

        parentsTotalWeights.Should().Be(offspringTotalWeights);

        int offspringTotalActivation = offspring[0].ActivationIndexes.Sum() + offspring[1].ActivationIndexes.Sum();
        int parentsTotalActivation   = parent1Dna.ActivationIndexes.Sum() + parent2Dna.ActivationIndexes.Sum();

        offspringTotalActivation.Should().Be(parentsTotalActivation);
    }
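
The parents-vs-offspring totals can only stay equal if crossover redistributes the parents' genes rather than blending or re-rolling them: every parental gene must end up in exactly one child. A sketch of gene-swapping crossover with that property; the crossover-severity argument passed as 5 above is ignored here, which is a simplification.

using System;
using System.Collections.Generic;

public static class CrossoverSketch
{
    private static readonly Random Rng = new Random();

    // Gene-swapping crossover: at each position the two children either keep or swap the
    // parents' genes. Every parental gene lands in exactly one child, so the combined sums
    // of the offspring equal the combined sums of the parents, as the totals checks expect.
    public static (List<double> ChildA, List<double> ChildB) Crossover(
        IReadOnlyList<double> parentA, IReadOnlyList<double> parentB)
    {
        if (parentA.Count != parentB.Count)
            throw new ArgumentException("Parents must share a topology.");

        var childA = new List<double>(parentA.Count);
        var childB = new List<double>(parentB.Count);

        for (int i = 0; i < parentA.Count; i++)
        {
            bool swap = Rng.Next(2) == 0;
            childA.Add(swap ? parentB[i] : parentA[i]);
            childB.Add(swap ? parentA[i] : parentB[i]);
        }

        return (childA, childB);
    }
}
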
Example No. 9
    public void CompareWeightsReturnsExpectedResultOfIdentical()
    {
        // Arrange
        const int nInputs  = 20;
        const int nOutputs = 8;

        int[] hiddenLayers = new int[] { 30, 4, 12 };
        Dna   dna1         = Dna.GenerateRandomDnaEncoding(nInputs, hiddenLayers, nOutputs, ActivationType.LeakyRelu, true);
        Dna   dna2         = Dna.Clone(dna1);

        // Act
        var comparisonResult = DnaUtils.CompareWeights(dna1, dna2);

        // Assert
        float  percentWeightDiff      = comparisonResult.Item1;
        double percentWeightValueDiff = comparisonResult.Item2;

        percentWeightDiff.Should().Be(0);
        percentWeightValueDiff.Should().Be(0);
    }
Example No. 10
    public void EqualsTest()
    {
        // Arrange
        const int nInputs  = 9;
        const int nOutputs = 1;

        int[] hiddenLayers       = new int[] { 9 };
        Dna   dna1               = Dna.GenerateRandomDnaEncoding(nInputs, hiddenLayers, nOutputs, ActivationType.LeakyRelu, true);
        Dna   dna1ValueClone     = Dna.Clone(dna1);
        Dna   dna1ReferenceClone = dna1;
        Dna   dna2               = Dna.Clone(dna1);

        dna2.WeightsAndBiases[0] = 50;

        // Act/Assert
        dna1.Equals(dna1).Should().BeTrue();

        dna1.Equals(dna1ReferenceClone).Should().BeTrue();
        dna1ReferenceClone.Equals(dna1).Should().BeTrue();

        dna1.Equals(dna1ValueClone).Should().BeTrue();
        dna1ValueClone.Equals(dna1).Should().BeTrue();

        dna1.Equals(dna2).Should().BeFalse();
        dna2.Equals(dna1).Should().BeFalse();

        dna1ValueClone.Equals(dna2).Should().BeFalse();
        dna2.Equals(dna1ValueClone).Should().BeFalse();

        dna1ReferenceClone.Equals(dna2).Should().BeFalse();
        dna2.Equals(dna1ReferenceClone).Should().BeFalse();

        dna1.WeightsAndBiases[1] = 100;

        dna1.Equals(dna1ReferenceClone).Should().BeTrue(); // the reference alias is the same object, so it stays equal
        dna1ReferenceClone.Equals(dna1).Should().BeTrue();

        dna1.Equals(dna1ValueClone).Should().BeFalse(); // the value clone no longer matches after the mutation
        dna1ValueClone.Equals(dna1).Should().BeFalse();
    }
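
The sequence of checks above describes value equality: two Dna instances compare equal while their genes match element-by-element, become unequal as soon as any gene changes, and a reference alias naturally tracks the mutation. A sketch of an Equals override with those semantics; the member names mirror the ones exercised in the test, but the implementation itself is an assumption.

using System.Collections.Generic;
using System.Linq;

// Value-equality sketch consistent with the assertions above. A real implementation would
// also keep GetHashCode consistent with Equals; a constant placeholder is used here.
public class DnaEqualitySketch
{
    public List<int>    OutputsPerLayer   = new List<int>();
    public List<double> WeightsAndBiases  = new List<double>();
    public List<int>    ActivationIndexes = new List<int>();

    public override bool Equals(object obj)
    {
        if (ReferenceEquals(this, obj)) return true;

        return obj is DnaEqualitySketch other
               && OutputsPerLayer.SequenceEqual(other.OutputsPerLayer)
               && WeightsAndBiases.SequenceEqual(other.WeightsAndBiases)
               && ActivationIndexes.SequenceEqual(other.ActivationIndexes);
    }

    public override int GetHashCode() => 0; // placeholder; see note above
}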