// Feeds forward from a hidden layer whose nodes only ever had Input set,
// never an ActivatedSum; per the test name, FeedForward is expected to throw.
// NOTE(review): no [ExpectedException]/Assert.Throws is visible in this chunk —
// confirm the expected exception is actually asserted elsewhere (e.g. via attribute).
public void FeedForwardThrowsIfLayerActivatedSumsAreNull()
{
    Layer sourceLayer = new Layer(LayerType.Hidden, new LogisticSigmoidActivationFunction(), 1, 4);
    Layer targetLayer = new Layer(LayerType.Output, new SoftmaxActivationFunction(), 2, 3);

    // Populate raw inputs only — ActivatedSum is deliberately left unset.
    sourceLayer.Nodes[0].Input = 1;
    sourceLayer.Nodes[1].Input = 2;
    sourceLayer.Nodes[2].Input = 3;
    sourceLayer.Nodes[3].Input = 4;

    // One bias per target node (3), one weight per source/target pair (4 x 3 = 12).
    sourceLayer.Biases = new List<double>() { 1, 1, 1 };
    sourceLayer.Weights = new List<double>() { 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 };

    sourceLayer.FeedForward(targetLayer);
}
// Happy-path check: after activating the input layer and feeding forward,
// the first hidden node's Input equals the weighted sum of the activated
// sums (1 + 2 + 3, all weights = 1) plus its bias (1), i.e. 7.
public void FeedForwardSetsNextLayerInputsCorrectly()
{
    Layer inputLayer = new Layer(LayerType.Input, null, 0, 3);
    Layer hiddenLayer = new Layer(LayerType.Hidden, new LogisticSigmoidActivationFunction(), 1, 4);

    // One bias per hidden node (4), one weight per input/hidden pair (3 x 4 = 12).
    inputLayer.Biases = new List<double>() { 1, 1, 1, 1 };
    inputLayer.Weights = new List<double>() { 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 };

    for (int node = 0; node < 3; node++)
    {
        inputLayer.Nodes[node].Input = node + 1;
        inputLayer.Nodes[node].ActivatedSum = node + 1;
    }

    inputLayer.ActivateNodes();
    inputLayer.FeedForward(hiddenLayer);

    Assert.AreEqual(7, hiddenLayer.Nodes[0].Input);
}
// Feeds forward without ever assigning Biases (and with a 5-node input layer
// against a 4-node hidden layer); per the test name, FeedForward should throw
// because the bias count does not match the next layer's node count.
// NOTE(review): no [ExpectedException]/Assert.Throws is visible in this chunk —
// confirm the expected exception is actually asserted elsewhere (e.g. via attribute).
public void FeedForwardThrowsIfBiasesDoesNotEqualNextLayerNodes()
{
    Layer sourceLayer = new Layer(LayerType.Input, null, 0, 5);
    Layer targetLayer = new Layer(LayerType.Hidden, new LogisticSigmoidActivationFunction(), 1, 4);

    // Only the first four of five nodes are populated, mirroring the original setup.
    for (int node = 0; node < 4; node++)
    {
        sourceLayer.Nodes[node].Input = node + 1;
        sourceLayer.Nodes[node].ActivatedSum = node + 1;
    }

    sourceLayer.ActivateNodes();
    sourceLayer.FeedForward(targetLayer);
}