// Verifies that the converter flattens a two-layer network's weights,
// layer by layer and neuron by neuron, into a single genotype list.
public void ShouldReturnCorrectGenotype()
{
    // First layer: four neurons with 3 weights each (2 inputs + bias).
    NetVector v1 = NetVector.FromValues(new double[] { 1, 2, 3 });
    NetVector v2 = NetVector.FromValues(new double[] { 4, 5, 6 });
    NetVector v3 = NetVector.FromValues(new double[] { 7, 8, 9 });
    NetVector v4 = NetVector.FromValues(new double[] { 10, 11, 12 });
    // Second layer: two neurons with 5 weights each (4 inputs + bias).
    NetVector v5 = NetVector.FromValues(new double[] { 13, 14, 15, 16, 17 });
    NetVector v6 = NetVector.FromValues(new double[] { 18, 19, 20, 21, 22 });

    Neuron n1 = new Neuron(v1, Config.ACTIVATION_FUNC);
    Neuron n2 = new Neuron(v2, Config.ACTIVATION_FUNC);
    Neuron n3 = new Neuron(v3, Config.ACTIVATION_FUNC);
    Neuron n4 = new Neuron(v4, Config.ACTIVATION_FUNC);
    Neuron n5 = new Neuron(v5, Config.ACTIVATION_FUNC);
    Neuron n6 = new Neuron(v6, Config.ACTIVATION_FUNC);

    NetLayer l1 = new NetLayer(new Neuron[] { n1, n2, n3, n4 });
    NetLayer l2 = new NetLayer(new Neuron[] { n5, n6 });
    NeuralNetwork n = new NeuralNetwork(new NetLayer[] { l1, l2 });

    List<NeuralNetwork> ln = new List<NeuralNetwork>();
    ln.Add(n);

    List<List<double>> genotypes = new NeuralNetworkConverter().ToGenotypes(ln);

    // BUGFIX: Assert.AreEqual takes (expected, actual) — the original call
    // had the arguments reversed, which garbles the failure message.
    Assert.AreEqual(1, genotypes.Count);
    // The genotype must be all 22 weights in declaration order.
    TestUtils.AssertEqualDoubleList(
        new List<double>(new double[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22 }),
        genotypes[0]
    );
}
// Rebuilds a NeuralNetwork from a flat genotype using the given topology,
// where topology[0] is the input count and each later entry is the number
// of neurons in the corresponding layer.
private NeuralNetwork ToNetwork(List<double> genotype, int[] topology)
{
    int cursor = 0;
    NetLayer[] builtLayers = new NetLayer[topology.Length - 1];

    for (int layer = 1; layer < topology.Length; ++layer)
    {
        Neuron[] layerNeurons = new Neuron[topology[layer]];

        for (int neuron = 0; neuron < layerNeurons.Length; ++neuron)
        {
            // One weight per input of the previous layer, plus one
            // extra slot for the bias weight.
            double[] weights = new double[topology[layer - 1] + 1];
            for (int w = 0; w < weights.Length; ++w)
            {
                weights[w] = genotype[cursor++];
            }

            layerNeurons[neuron] = new Neuron(NetVector.FromValues(weights), Config.ACTIVATION_FUNC);
        }

        builtLayers[layer - 1] = new NetLayer(layerNeurons);
    }

    return new NeuralNetwork(builtLayers);
}
// Scalar product of two equal-length vectors, no bias term:
// 5*0.23 + 1.5*3.4 + 2.4*9.9 = 30.01
public void ShouldReturnCorrectWithoutBias()
{
    NetVector left = NetVector.FromValues(new double[] { 5, 1.5, 2.4 });
    NetVector right = NetVector.FromValues(new double[] { 0.23, 3.4, 9.9 });

    double product = left.Scalar(right);

    Assert.AreEqual(30.01, product, 0.01);
}
// Scalar product where the shorter vector is extended with the given
// bias input: 1*(-1) + 3*1 + 2*4 = 10
public void ShouldReturnCorrectWithBias()
{
    NetVector weights = NetVector.FromValues(new double[] { 1, 3, 2 });
    NetVector input = NetVector.FromValues(new double[] { 1, 4 });

    double product = weights.Scalar(input, -1);

    Assert.AreEqual(10, product);
}
// A linear neuron with 5 weights fed a 4-element input; the first
// weight pairs with the bias input:
// 1.1*(-1) + 2.2*2.2 + 3.3*3.3 + 4.4*4.4 + 5.5*5.5 = 64.24
public void ShouldReturnCorrectResponse()
{
    NetVector weights = NetVector.FromValues(new double[] { 1.1, 2.2, 3.3, 4.4, 5.5 });
    Neuron neuron = new Neuron(weights, ActivationFunctions.Linear);
    NetVector input = NetVector.FromValues(new double[] { 2.2, 3.3, 4.4, 5.5 });

    double response = neuron.Response(input);

    Assert.AreEqual(64.24, response, 0.01);
}
// Scalar on vectors whose lengths differ by more than the bias slot
// must reject the input with an ArgumentException.
public void ShouldReturnException()
{
    NetVector longer = NetVector.FromValues(new double[] { 5, 1.5, 2.4, 6.6 });
    NetVector shorter = NetVector.FromValues(new double[] { 0.23, 3.4, 9.9 });

    Assert.Throws<System.ArgumentException>(delegate ()
    {
        longer.Scalar(shorter);
    });
}
// Feeds the input vector to every neuron in the layer and collects the
// individual responses into the layer's output vector.
public NetVector Response(NetVector inputVector)
{
    double[] activations = new double[this.neurons.Length];

    for (int n = 0; n < activations.Length; ++n)
    {
        activations[n] = this.neurons[n].Response(inputVector);
    }

    return NetVector.FromValues(activations);
}
// Propagates raw input values through every layer of the network.
// The input is normalized first and its length must match the declared
// input size (topology[0]); otherwise an ArgumentException is thrown.
public double[] Response(double[] inputValues)
{
    if (inputValues.Length != topology[0])
    {
        throw new System.ArgumentException("Input vector size is not correct");
    }

    NetVector signal = NetVector.FromValues(inputValues).Normalize;
    foreach (NetLayer layer in layers)
    {
        signal = layer.Response(signal);
    }

    return signal.Values;
}
// A two-neuron linear layer fed a 2-element input (first weight pairs
// with the bias input of -1):
//   neuron 1: -1 + 2*7 + 3*8 = 37
//   neuron 2: -4 + 5*7 + 6*8 = 79
public void ShouldReturnCorrectResponse()
{
    NetVector w1 = NetVector.FromValues(new double[] { 1, 2, 3 });
    Neuron first = new Neuron(w1, ActivationFunctions.Linear);
    NetVector w2 = NetVector.FromValues(new double[] { 4, 5, 6 });
    Neuron second = new Neuron(w2, ActivationFunctions.Linear);
    NetLayer layer = new NetLayer(new Neuron[] { first, second });

    NetVector input = NetVector.FromValues(new double[] { 7, 8 });
    double[] outputs = layer.Response(input).Values;

    Assert.AreEqual(2, outputs.Length);
    Assert.AreEqual(37, outputs[0], 0.01);
    Assert.AreEqual(79, outputs[1], 0.01);
}