public void BackPropagationIsCorrect()
{
    // Arrange: linear layer with known weights, run on a fixed input so the
    // gradients below can be computed by hand.
    var weights = new NetworkMatrix(new double[,] { { 1, 2 }, { 3, 5 } });
    var layer = Layer2.CreateLinearLayer(weights);
    var input = new NetworkVector(new double[] { 1, -1 });
    layer.Run(input);

    var outputGradient = new NetworkVector(new double[] { 7, 11 });

    // Assert: weights gradient is outer product of output gradient and input.
    var expectedWeightsGradient = new NetworkMatrix(new double[,] { { 7, -7 }, { 11, -11 } });
    Assert.AreEqual(expectedWeightsGradient, layer.WeightsGradient(outputGradient));

    // Biases gradient equals the output gradient for a linear layer.
    var expectedBiasesGradient = new NetworkVector(new double[] { 7, 11 });
    Assert.AreEqual(expectedBiasesGradient, layer.BiasesGradient(outputGradient));

    // Input gradient is weights-transpose times the output gradient.
    var expectedInputGradient = new NetworkVector(new double[] { 40, 69 });
    Assert.AreEqual(expectedInputGradient, layer.InputGradient(outputGradient));
}
public void HandleMessageShouldCreateInitializeMessagesOnlyForActiveNodes()
{
    // Arrange: a matrix-update message carrying one matrix per node (ids 0..4).
    _message.MessageType = MessageType.MatrixUpdateMessage;
    var matrices = new Dictionary<uint, NetworkMatrix>();
    for (uint id = 0; id <= 4; id++)
    {
        matrices[id] = NetworkMatrix.Initialize(_networkMock.Object);
    }
    _message.Data = matrices;

    // Deactivate one linked node so it must be skipped by the handler.
    _nodes[1].IsActive = false;
    var linkedNodeCount = Receiver.LinkedNodesId.Count;

    // Act
    _messageHandler.HandleMessage(_message);

    // Assert: messages were created and queued once per linked node,
    // minus the single inactive node.
    _generalMessageCreatorMock.Verify(
        c => c.CreateMessages(It.Is<MessageInitializer>(init => init.MessageType == MessageType.MatrixUpdateMessage)),
        Times.Exactly(linkedNodeCount - 1));
    _generalMessageCreatorMock.Verify(
        c => c.AddInQueue(
            It.Is<Message[]>(batch => batch.All(msg => msg.MessageType == MessageType.MatrixUpdateMessage)),
            Receiver.Id),
        Times.Exactly(linkedNodeCount - 1));

    // The receiver adopts its own matrix (key 0) from the message payload.
    Assert.That(Receiver.NetworkMatrix, Is.EqualTo(((Dictionary<uint, NetworkMatrix>)_message.Data)[0]));
}
public void CanBPWC_trivialBatch()
{
    // Arrange: 1x1 combiner with weight 1 and bias 10.
    var weights = new NetworkMatrix(new double[,] { { 1 } });
    var biases = new NetworkVector(new double[] { 10 });
    var combiner = new BatchWeightedCombiner(weights, biases);
    var input = new NetworkVector(new double[] { 1 });
    var outputGradient = new NetworkVector(new double[] { 1 });

    // Act: single run/backprop batch, then apply the accumulated update.
    combiner.StartBatch();
    combiner.Run(input);
    combiner.BackPropagate(outputGradient);
    combiner.EndBatchAndUpdate();

    // Assert: forward output 1*1 + 10 = 11; input gradient passes through.
    Assert.AreEqual(new NetworkVector(new double[] { 11 }), combiner.Output);
    Assert.AreEqual(new NetworkVector(new double[] { 1 }), combiner.InputGradient);

    // Weight 1 - 1 = 0 and bias 10 - 1 = 9 after the update step.
    var expectedWeights = new double[,] { { 0 } };
    var expectedBiases = new double[] { 9 };
    for (int row = 0; row < combiner.NumberOfOutputs; row++)
    {
        Assert.AreEqual(expectedBiases[row], combiner.State.Biases[row]);
        for (int col = 0; col < combiner.NumberOfInputs; col++)
        {
            Assert.AreEqual(expectedWeights[row, col], combiner.State.Weights[row, col]);
        }
    }
}
// Reconstructs the cheapest route by walking backwards from the receiver to
// the sender along the price labelling in networkMatrix, collecting the
// channel used at each hop.  The collected list is reversed before returning
// so the route reads sender -> receiver.
// NOTE(review): if no linked node ever satisfies the Eps test, the while loop
// never advances and this method spins forever — presumably the matrix is
// always a consistent shortest-path labelling produced by CountPriceMatrix;
// confirm with callers.
private List<Channel> BuildRoute(NetworkMatrix networkMatrix, uint senderId, uint receiverId)
{
    var currentNodeId = receiverId;
    var route = new List<Channel>();
    while (currentNodeId != senderId)
    {
        var currentNode = Network.GetNodeById(currentNodeId);
        foreach (var linkedNodeId in currentNode.LinkedNodesId)
        {
            // A neighbour lies on the optimal path when its label plus the
            // edge price equals the current node's label (within Eps).
            var difference = Math.Abs(networkMatrix.NodeIdWithCurrentPrice[currentNodeId]
                - networkMatrix.NodeIdWithCurrentPrice[linkedNodeId]
                - networkMatrix.PriceMatrix[linkedNodeId][currentNodeId]);
            if (difference < AllConstants.Eps)
            {
                var channel = Network.GetChannel(currentNodeId, linkedNodeId);
                route.Add(channel);
                // Step one hop closer to the sender and restart the scan.
                currentNodeId = linkedNodeId;
                break;
            }
        }
    }
    // Walked receiver -> sender; flip so callers get sender -> receiver.
    route.Reverse();
    return (route);
}
public void CanMakeLayer()
{
    // A layer constructed from a valid square weight matrix should exist.
    var weights = new NetworkMatrix(new double[,] { { 1, 2 }, { 3, 4 } });
    var layer = new NeuralNet.NetComponent.Layer2(weights);
    Assert.IsNotNull(layer);
}
public void CanMakeSigmoidLayer()
{
    // The logistic factory method should produce a non-null layer.
    var weights = new NetworkMatrix(new double[,] { { 1, 2 }, { 3, 4 } });
    var layer = Layer2.CreateLogisticLayer(weights);
    Assert.IsNotNull(layer);
}
public void LayerHasRightSize()
{
    // A 2x3 weight matrix implies 3 inputs (columns) and 2 outputs (rows).
    var weights = new NetworkMatrix(new double[,] { { 1, 2, 3 }, { 3, 4, 5 } });
    var layer = new NeuralNet.NetComponent.Layer2(weights);

    Assert.AreEqual(3, layer.NumberOfInputs);
    Assert.AreEqual(2, layer.NumberOfOutputs);
}
public void CanMakeLayerWithBiases()
{
    // A 2-output weight matrix paired with a 2-element bias vector is valid.
    var weights = new NetworkMatrix(new double[,] { { 1, 2 }, { 3, 4 } });
    var biases = new NetworkVector(new double[] { 5, 7 });
    var layer = new NeuralNet.NetComponent.Layer2(weights, biases);
    Assert.IsNotNull(layer);
}
public void InputGradientRuns()
{
    // For a logistic layer that has not been run, the input gradient is zero.
    var weights = new NetworkMatrix(new double[,] { { 1 } });
    var outputGradient = new NetworkVector(new double[] { 1 });
    var layer = Layer2.CreateLogisticLayer(weights);

    var expected = new NetworkVector(new double[] { 0 });
    Assert.AreEqual(expected, layer.InputGradient(outputGradient));
}
public void UnrunLayerHasZeroOutput()
{
    // Before Run() is ever called the layer's output vector is all zeros.
    var weights = new NetworkMatrix(new double[,] { { 1, 2, 3 }, { 3, 4, 5 } });
    var layer = new NeuralNet.NetComponent.Layer2(weights);

    var expected = new NetworkVector(new double[] { 0, 0 });
    Assert.AreEqual(expected, layer.Output);
}
public void CanMakeBWCWithNullWeights()
{
    // Arrange: null weights must be rejected by the constructor.
    NetworkMatrix weights = null;
    NetworkVector biases = new NetworkVector(new double[] { 1 });

    try
    {
        // Discard the instance: only the constructor's validation is under test.
        _ = new BatchWeightedCombiner(weights, biases);
        // Fixed: the message previously named the wrong class ("WeightedCombiner").
        Assert.Fail("Failure to throw ArgumentException when trying to create a BatchWeightedCombiner with null weights.");
    }
    catch (ArgumentException)
    {
        // Expected: constructor rejected the null weights.
    }
}
public void CanMakeBWC()
{
    // A combiner built from a 1x1 weight matrix defaults to zero biases.
    var weights = new NetworkMatrix(new double[,] { { 1 } });
    var combiner = new BatchWeightedCombiner(weights);

    Assert.AreNotEqual(null, combiner);
    Assert.AreEqual(1, combiner.NumberOfOutputs);
    Assert.AreEqual(1, combiner.NumberOfInputs);
    Assert.AreEqual(1, combiner.State.Weights[0, 0]);
    Assert.AreEqual(0, combiner.State.Biases[0]);
}
public void CanRunBWC()
{
    // A zero input through weight 1 plus bias 3 produces output 3.
    var weights = new NetworkMatrix(new double[,] { { 1 } });
    var biases = new NetworkVector(new double[] { 3 });
    var combiner = new BatchWeightedCombiner(weights, biases);

    combiner.Run(new NetworkVector(combiner.NumberOfInputs));

    Assert.AreEqual(3, combiner.Output.ToArray()[0]);
}
public void ResetShouldRemoveAllTables()
{
    // Arrange: register a node and give it a freshly initialized matrix.
    _network.AddNode(_node1);
    _node1.NetworkMatrix = NetworkMatrix.Initialize(_network);

    // Act: reset the whole network.
    _network.Reset();

    // Assert: the reset cleared the node's routing matrix.
    Assert.IsNull(_node1.NetworkMatrix);
}
public void SigmoidLayerHasRightRun()
{
    // Arrange: weighted sums of (1,2,3) under these rows are 4 and 3, so the
    // logistic layer should emit sigmoid(4) and sigmoid(3).
    var weights = new NetworkMatrix(new double[,] { { 1, 0, 1 }, { 1, 1, 0 } });
    var input = new NetworkVector(new double[] { 1, 2, 3 });
    var layer = Layer2.CreateLogisticLayer(weights);

    layer.Run(input);

    var expected = new NetworkVector(new double[] { logistic(4), logistic(3) });
    Assert.AreEqual(expected, layer.Output);
}
public void InputGradientRequiresNonNullInput()
{
    // Arrange: any valid layer will do; the null argument is what is tested.
    NetworkMatrix weights = new NetworkMatrix(new double[,] { { 1 } });
    NeuralNet.NetComponent.Layer2 layer = new NeuralNet.NetComponent.Layer2(weights);

    try
    {
        layer.InputGradient(null);
        // Fixed: message previously said "Backpropogate" (typo and wrong method name).
        Assert.Fail("InputGradient should throw an ArgumentException for null input, but did not.");
    }
    catch (ArgumentException)
    {
        // Expected: null output gradient was rejected.
    }
}
public void InputGradientRequiresCorrectInputSize()
{
    // Arrange: a 1-output layer fed a 3-element gradient (wrong dimension).
    NetworkMatrix weights = new NetworkMatrix(new double[,] { { 1 } });
    NetworkVector badInput = new NetworkVector(new double[] { 1, 2, 3 });
    NeuralNet.NetComponent.Layer2 layer = new NeuralNet.NetComponent.Layer2(weights);

    try
    {
        layer.InputGradient(badInput);
        // Fixed: message previously said "Backpropogate" and "NumberOfNeuron".
        Assert.Fail("InputGradient should throw an ArgumentException if the gradient dimension does not match the layer's output dimension, but did not.");
    }
    catch (ArgumentException)
    {
        // Expected: mismatched gradient size was rejected.
    }
}
public void CannotMakeLayerWithMismatchedBiases()
{
    // A 2-output weight matrix cannot be paired with a 3-element bias vector.
    var weights = new NetworkMatrix(new double[,] { { 1, 2 }, { 3, 4 } });
    var biases = new NetworkVector(new double[] { 5, 7, 11 });

    try
    {
        NeuralNet.NetComponent.Layer2 layer = new NeuralNet.NetComponent.Layer2(weights, biases);
        Assert.Fail("ArgumentException expected but not thrown");
    }
    catch (ArgumentException)
    {
        // Expected: the size mismatch was rejected.
    }
}
public void LinearLayerWithBiasesHasRightRun()
{
    // Weighted sums of (1,2,3) are 4 and 3; the biases shift them to 8 and 6.
    var weights = new NetworkMatrix(new double[,] { { 1, 0, 1 }, { 1, 1, 0 } });
    var biases = new NetworkVector(new double[] { 4, 3 });
    var input = new NetworkVector(new double[] { 1, 2, 3 });
    var layer = Layer2.CreateLinearLayer(weights, biases);

    layer.Run(input);

    var expected = new NetworkVector(new double[] { 8, 6 });
    Assert.AreEqual(expected, layer.Output);
}
public void NeuralFunctionNotNullRequiresDerivativeNotNull()
{
    // Arrange: supply a neural function but a null derivative — invalid combination.
    NetworkMatrix weights = new NetworkMatrix(new double[,] { { 1, 2, 3 }, { 3, 4, 5 } });
    NetworkVector biases = new NetworkVector(new double[] { 1, 2 });

    try
    {
        // Discard the instance: only the constructor's validation is under test.
        _ = new NeuralNet.NetComponent.Layer2(weights, biases, x => 1.0, null);
        // Fixed typos: "throw and Argument exception" and "_neuralFunctioinDerivative".
        Assert.Fail("Attempt to create Layer with non-null _neuralFunction and null _neuralFunctionDerivative should throw an ArgumentException, but did not.");
    }
    catch (ArgumentException)
    {
        // Expected: the null derivative was rejected.
    }
}
public void CanRunBWC_2by3()
{
    // Row dot products with (1,2,3): 1+4+9=14 and 5+14+33=52; plus biases
    // (100, 200) gives (114, 252).
    var weights = new NetworkMatrix(new double[,] { { 1, 2, 3 }, { 5, 7, 11 } });
    var biases = new NetworkVector(new double[] { 100, 200 });
    var combiner = new BatchWeightedCombiner(weights, biases);

    combiner.Run(new NetworkVector(new double[] { 1, 2, 3 }));

    var expected = new NetworkVector(new double[] { 114, 252 });
    Assert.AreEqual(expected, combiner.Output);
}
public void LinearLayerHasRightRun()
{
    // With zero biases, output equals the plain weighted sums: 4 and 3.
    var weights = new NetworkMatrix(new double[,] { { 1, 0, 1 }, { 1, 1, 0 } });
    var biases = new NetworkVector(new double[] { 0, 0 });
    var input = new NetworkVector(new double[] { 1, 2, 3 });
    var layer = NeuralNet.NetComponent.Layer2.CreateLinearLayer(weights, biases);

    layer.Run(input);

    var expected = new NetworkVector(new double[] { 4, 3 });
    Assert.AreEqual(expected, layer.Output);
}
public void BackpropagateRunsWithNonzeroLayerInput()
{
    // For a 1x1 linear layer the input gradient is just weight * gradient = 1.
    var weights = new NetworkMatrix(new double[,] { { 1 } });
    var layer = Layer2.CreateLinearLayer(weights);
    layer.Run(new NetworkVector(new double[] { 2 }));

    var outputGradient = new NetworkVector(new double[] { 1 });
    var expected = new NetworkVector(new double[] { 1 });

    Assert.AreEqual(expected, layer.InputGradient(outputGradient));
}
public void InputGradientRunsTwoByThree()
{
    // With a (1,1) output gradient the input gradient is the column sums of
    // the weights: 3, 5, 7.
    var weights = new NetworkMatrix(new double[,] { { 1, 2, 3 }, { 2, 3, 4 } });
    var layer = Layer2.CreateLinearLayer(weights);
    layer.Run(new NetworkVector(new double[] { 1, 0, -1 }));

    var outputGradient = new NetworkVector(new double[] { 1, 1 });
    var expected = new NetworkVector(new double[] { 3, 5, 7 });

    Assert.AreEqual(expected, layer.InputGradient(outputGradient));
}
/// <summary>
/// Dijkstra-style labelling of node prices, expanding nodes recursively in
/// cheapest-first order until every reachable node has been visited.
/// </summary>
/// <param name="currentId">Node being expanded on this call.</param>
/// <param name="startId">Optional origin; when set, edge prices are only
/// recounted for the origin's own edges (see loop below).</param>
/// <param name="matrix">Accumulated price state; created on the first call.</param>
/// <param name="visitedNodes">Set of already-expanded nodes; created on the first call.</param>
/// <returns>The matrix holding final node labels and edge prices.</returns>
public NetworkMatrix CountPriceMatrix(uint currentId, uint? startId, NetworkMatrix matrix = null, SortedSet<uint> visitedNodes = null)
{
    StartCountingPriceProcess(currentId, startId, ref matrix, ref visitedNodes);

    // Already expanded: nothing more to do.  (Fixed: the original also
    // re-added currentId to visitedNodes here, a redundant no-op for a set.)
    if (visitedNodes.Contains(currentId))
    {
        return (matrix);
    }
    visitedNodes.Add(currentId);

    var currentNode = Network.GetNodeById(currentId);
    foreach (var linkedNodeId in currentNode.LinkedNodesId)
    {
        // Recount edge prices for every node during a full recount
        // (startId == null), or only for the origin node itself.
        if (!startId.HasValue || startId.Value == currentId)
        {
            matrix.PriceMatrix[currentId][linkedNodeId] = CountPrice(currentId, linkedNodeId);
        }

        // Relax the edge: lower the neighbour's label if this path is cheaper.
        var currentPrice = matrix.NodeIdWithCurrentPrice[currentId] + matrix.PriceMatrix[currentId][linkedNodeId];
        if (matrix.NodeIdWithCurrentPrice[linkedNodeId] > currentPrice)
        {
            matrix.NodeIdWithCurrentPrice[linkedNodeId] = currentPrice;
        }
    }

    if (!Network.Nodes.All(n => visitedNodes.Contains(n.Id)))
    {
        // Expand the cheapest unvisited node next (classic Dijkstra order).
        var nextNodeId = matrix.NodeIdWithCurrentPrice
            .Where(kv => !visitedNodes.Contains(kv.Key))
            .Aggregate((l, r) => l.Value < r.Value ? l : r)
            .Key;

        // An infinite label means every remaining node is unreachable; stop.
        if (!double.IsInfinity(matrix.NodeIdWithCurrentPrice[nextNodeId]))
        {
            return (CountPriceMatrix(nextNodeId, startId, matrix, visitedNodes));
        }
    }

    return (matrix);
}
public void InputGradientRunsTwoByThree()
{
    // Logistic layer: the input gradient is the weights-transpose product
    // scaled by the sigmoid derivative at the run outputs.
    var weights = new NetworkMatrix(new double[,] { { 1, 2, 3 }, { 2, 3, 4 } });
    var layer = Layer2.CreateLogisticLayer(weights);
    layer.Run(new NetworkVector(new double[] { 1, 0, -1 }));

    var outputGradient = new NetworkVector(new double[] { 1, 1 });
    var expected = new NetworkVector(new double[]
    {
        0.31498075621051952,
        0.52496792701753248,
        0.7349550978245456
    });

    Assert.AreEqual(expected, layer.InputGradient(outputGradient));
}
public void BackpropagateRuns()
{
    // For an un-run 1x1 linear layer: the input and bias gradients pass the
    // output gradient through, and the weights gradient is zero because the
    // stored input is zero.
    var weights = new NetworkMatrix(new double[,] { { 1 } });
    var layer = new NeuralNet.NetComponent.Layer2(weights);
    var outputGradient = new NetworkVector(new double[] { 1 });

    Assert.AreEqual(new NetworkVector(new double[] { 1 }), layer.InputGradient(outputGradient));
    Assert.AreEqual(new NetworkVector(new double[] { 1 }), layer.BiasesGradient(outputGradient));
    Assert.AreEqual(new NetworkMatrix(new double[,] { { 0 } }), layer.WeightsGradient(outputGradient));
}
static void Main(string[] args)
{
    // Removed the commented-out Network experiments that used to live here.

    // Training data: the XOR truth table — 4 samples, 2 inputs, 1 target each.
    Matrix inputs = new Matrix(4, 2, 0, 0, 0, 1, 1, 0, 1, 1);
    Matrix targets = new Matrix(4, 1, 0, 1, 1, 0);

    // Sigmoid network with 5 hidden units and 1 output unit.
    NetworkMatrix netMat = new NetworkMatrix(NetworkMatrix.Sigmoid, inputs, 5, 1, targets);

    // Initial synchronous training pass; wait for Enter before continuing.
    netMat.Learn(10000);
    Console.ReadLine();

    // Continue learning on a background thread while the user queries the net.
    netMat.RunBackgroundLearn();
    while (true)
    {
        // Query the net with the fixed input (1, 0) and show the prediction.
        Matrix prediction = netMat.Use(new Matrix(1, 2, 1, 0));
        Console.WriteLine(prediction.ToString());
        if (Console.ReadLine() == "stop")
        {
            break;
        }
    }
    netMat.StopBackgroundLearn();
}
public void CanBPWC2x3_nonTrivialBatch()
{
    // Arrange: 2x3 combiner; the batch repeats the same run/backprop twice.
    var weights = new NetworkMatrix(new double[,] { { 1, 2, 3 }, { 5, 7, 11 } });
    var biases = new NetworkVector(new double[] { 100, 200 });
    var combiner = new BatchWeightedCombiner(weights, biases);
    var input = new NetworkVector(new double[] { 1, 2, 3 });
    var outputGradient = new NetworkVector(new double[] { 1, 1 });

    // Act
    combiner.StartBatch();
    for (int pass = 0; pass < 2; pass++)
    {
        combiner.Run(input);
        combiner.BackPropagate(outputGradient);
    }
    combiner.EndBatchAndUpdate();

    // Assert: forward output and accumulated gradients/updates match the
    // hand-computed values for two identical passes.
    Assert.AreEqual(new NetworkVector(new double[] { 114, 252 }), combiner.Output);
    Assert.AreEqual(new NetworkVector(new double[] { 6, 9, 14 }), combiner.InputGradient);

    var expectedWeights = new double[,] { { -1, -2, -3 }, { 3, 3, 5 } };
    var expectedBiases = new double[] { 98, 198 };
    for (int row = 0; row < combiner.NumberOfOutputs; row++)
    {
        Assert.AreEqual(expectedBiases[row], combiner.State.Biases[row]);
        for (int col = 0; col < combiner.NumberOfInputs; col++)
        {
            Assert.AreEqual(expectedWeights[row, col], combiner.State.Weights[row, col]);
        }
    }
}
// Prepares the shared state for CountPriceMatrix: lazily creates the visited
// set and price matrix, and — when counting starts from an explicit origin —
// resets every node label to +infinity except the origin itself.
private void StartCountingPriceProcess(uint currentId, uint? startId, ref NetworkMatrix matrix, ref SortedSet<uint> visitedNodes)
{
    // Lazily create the visited set on the first call of the recursion.
    visitedNodes = visitedNodes ?? new SortedSet<uint>();

    // Lazily create the price matrix, seeding the current node with price 0.
    if (matrix == null)
    {
        matrix = NetworkMatrix.Initialize(Network);
        matrix.NodeIdWithCurrentPrice[currentId] = 0.0;
    }

    // Counting from an explicit origin: wipe all labels to +infinity and make
    // the origin the only zero-priced node.
    if (startId.HasValue && currentId == startId.Value)
    {
        foreach (var nodeId in matrix.NodeIdWithCurrentPrice.Keys.ToArray())
        {
            matrix.NodeIdWithCurrentPrice[nodeId] = double.PositiveInfinity;
        }
        matrix.NodeIdWithCurrentPrice[startId.Value] = 0.0;
    }
}