// A 1→1 network with a single unit weight and the default (null) activation
// should reproduce its input: 0.5 in, ~0.5 out.
public static bool Test_Weights()
{
    var network = MLPNetwork.CreateMLPNetwork(new int[] { 1, 1 }, new double[] { 1.0 }, null);

    var output = network.Calculate(new double[] { 0.5 });

    var error = Math.Abs(output[0] - 0.5);
    return error < 0.001;
}
// A 2-2-1 network, all weights 1.0, identity activation: inputs (0.5, 0.5)
// give each hidden node 1.0, and the output node sums them to 2.0.
public static bool Test_MoreWeights_LinearActivation()
{
    var layerSizes = new int[] { 2, 2, 1 };
    var unitWeights = new double[] { 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 };
    var network = MLPNetwork.CreateMLPNetwork(layerSizes, unitWeights, MLPNetwork.Identity);

    var output = network.Calculate(new double[] { 0.5, 0.5 });

    return Math.Abs(output[0] - 2) < 0.001;
}
// Verifies a 1→1 unit-weight network reproduces its input.
// Fixed: the original used Assert.Equal(double, double) — exact == comparison
// on floating point, which is fragile and inconsistent with the 0.001
// tolerance every sibling test uses. The precision overload (3 decimal
// places) matches that tolerance.
public void VerifyNetwork()
{
    var cut = MLPNetwork.CreateMLPNetwork(new int[] { 1, 1 }, new double[] { 1.0 }, null);

    var result = cut.Calculate(new double[] { 0.35 });

    Assert.Equal(0.35, result[0], 3);
}
// Running the same input through the same network twice must produce the
// same output — Calculate must not mutate network state between calls.
public static bool Test_ReuseNetwork_Sameresult()
{
    var network = MLPNetwork.CreateMLPNetwork(new int[] { 2, 2, 1 }, new double[] { 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 }, MLPNetwork.Identity);
    var input = new double[] { 0.5, 0.5 };

    var firstRun = network.Calculate(input);
    var secondRun = network.Calculate(new double[] { 0.5, 0.5 });

    return Math.Abs(firstRun[0] - secondRun[0]) < 0.001;
}
// Same 2-2-1 all-unit-weight topology but with the default (null) activation:
// each hidden node emits tanh(1.0), and the output sums the two hidden values.
public static bool Test_MoreWeights()
{
    var network = MLPNetwork.CreateMLPNetwork(new int[] { 2, 2, 1 }, new double[] { 1.0, 1.0, 1.0, 1.0, 1.0, 1.0 }, null);

    var actual = network.Calculate(new double[] { 0.5, 0.5 })[0];

    var expected = Math.Tanh(1.0) + Math.Tanh(1.0);
    return Math.Abs(actual - expected) < 0.001;
}
// Round-trip check: weights extracted from a network and fed back into
// CreateMLPNetwork must yield a network that computes the same output.
public static bool Test_Unwrap_Create_SameResult()
{
    var original = MLPNetwork.CreateMLPNetwork(new int[] { 1, 1 }, new double[] { 1.0 }, null);
    var originalOutput = original.Calculate(new double[] { 0.5 });

    var extractedWeights = original.ExtractWeights();
    var rebuilt = MLPNetwork.CreateMLPNetwork(new int[] { 1, 1 }, extractedWeights, null);
    var rebuiltOutput = rebuilt.Calculate(new double[] { 0.5 });

    return Math.Abs(originalOutput[0] - rebuiltOutput[0]) < 0.001;
}
/// <summary>
/// Builds a fully-connected MLP from <paramref name="setup"/>: one node array per
/// configured layer (plus one trailing bias node on every layer except the last),
/// then links every node in layer i-1 to every non-bias node in layer i.
/// </summary>
/// <param name="setup">Layer definitions: node counts, activation functions, and a
/// flat per-layer weight list. Weights are consumed in nested-loop order
/// (outer: source node in the previous layer, inner: destination node in the
/// current layer) — changing that iteration order would silently permute weights.</param>
/// <returns>The fully wired network.</returns>
public static MLPNetwork CreateMLPNetwork(NetworkSetup setup) {
    var network = new MLPNetwork { Layers = new Node[setup.Layers.Count][] };
    // Pass 1: create nodes. Every layer except the output gets one extra bias
    // node; the second Node ctor argument flags it (true only for the appended
    // index l == layer.NodeCount, which exists only when includeBias is true).
    for (var i = 0; i < setup.Layers.Count; i++) {
        var layer = setup.Layers[i];
        var includeBias = i != setup.Layers.Count - 1;
        network.Layers[i] = Enumerable.Range(0, layer.NodeCount + (includeBias?1:0)).Select(l => new Node(layer.ActivationFunction, l == layer.NodeCount)).ToArray();
    }
    // Pass 2: wire links. weightCounter walks setup.Layers[i].Weights in the
    // exact order the nested loops visit node pairs; bias nodes receive no
    // incoming links (skipped via IsBias) but still feed the next layer as sources.
    for (var i = 1; i < network.Layers.Length; i++) {
        var weightCounter = 0;
        var currentLayer = network.Layers[i];
        var previousLayer = network.Layers[i - 1];
        var layerSetup = setup.Layers[i];
        foreach (var n in previousLayer) {
            foreach (var nextNode in currentLayer) {
                if (nextNode.IsBias) { continue; }
                nextNode.InputLinks.Add(new Link(layerSetup.Weights[weightCounter++]) { SourceNode = n, DestinationNode = nextNode });
            }
        }
    }
    return(network);
}
// Constructs the player by loading its network from the default
// configuration files ("weights.txt" / "network.txt").
public NetworkPlayer()
{
    var networkSetup = ReadSetup("weights.txt", "network.txt");
    m_network = MLPNetworkFactory.CreateMLPNetwork(networkSetup);
}
// Constructs the wrapper by reading the given setup files and building
// the network from them.
public NetworkWrapper(string filename, string configName)
{
    var networkSetup = ReadSetup(filename, configName);
    m_network = MLPNetworkFactory.CreateMLPNetwork(networkSetup);
}