public void ExampleTest1()
{
    Accord.Math.Tools.SetupGenerator(0);

    // We'll use a simple XOR function as input.
    double[][] inputs =
    {
        new double[] { 0, 0 }, // 0 xor 0
        new double[] { 0, 1 }, // 0 xor 1
        new double[] { 1, 0 }, // 1 xor 0
        new double[] { 1, 1 }, // 1 xor 1
    };

    // XOR output, corresponding to the input.
    double[][] outputs =
    {
        new double[] { 0 }, // 0 xor 0 = 0
        new double[] { 1 }, // 0 xor 1 = 1
        new double[] { 1 }, // 1 xor 0 = 1
        new double[] { 0 }, // 1 xor 1 = 0
    };

    // Set up the deep belief network (2 inputs, 3 hidden, 1 output).
    DeepBeliefNetwork network = new DeepBeliefNetwork(2, 3, 1);

    // Initialize the network with Gaussian weights.
    new GaussianWeights(network, 0.1).Randomize();

    // Update the visible layer with the new weights.
    network.UpdateVisibleWeights();

    // Set up the learning algorithm.
    DeepBeliefNetworkLearning teacher = new DeepBeliefNetworkLearning(network)
    {
        Algorithm = (h, v, i) => new ContrastiveDivergenceLearning(h, v)
        {
            LearningRate = 0.1,
            Momentum = 0.5,
            Decay = 0.001,
        }
    };

    // Unsupervised learning on each hidden layer, except for the output.
    for (int i = 0; i < network.Layers.Length - 1; i++)
    {
        teacher.LayerIndex = i;

        // Compute the learning data that should be used for this layer.
        var layerInput = teacher.GetLayerInput(inputs);

        // Train the layer iteratively.
        for (int j = 0; j < 5000; j++)
            teacher.RunEpoch(layerInput);
    }

    // Supervised learning on the entire network, to provide output classification.
    var backpropagation = new BackPropagationLearning(network)
    {
        LearningRate = 0.1,
        Momentum = 0.5
    };

    // Run supervised learning.
    for (int i = 0; i < 5000; i++)
        backpropagation.RunEpoch(inputs, outputs);

    // Test the resulting accuracy.
    int correct = 0;
    for (int i = 0; i < inputs.Length; i++)
    {
        double[] outputValues = network.Compute(inputs[i]);
        double outputResult = outputValues.First() >= 0.5 ? 1 : 0;

        if (outputResult == outputs[i].First())
            correct++;
    }

    Assert.AreEqual(4, correct);
}
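// The snippets in this section compile against the Accord.NET and AForge.NET
// neural libraries. A minimal sketch of the using directives they assume;
// exact namespace locations vary between library versions, so treat these
// as assumptions rather than a definitive list:

using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using Accord.Math;                    // Tools.SetupGenerator
using Accord.Neuro;                   // GaussianWeights
using Accord.Neuro.ActivationFunctions; // BernoulliFunction (location varies by version)
using Accord.Neuro.Learning;          // DeepBeliefNetworkLearning, ContrastiveDivergenceLearning
using Accord.Neuro.Networks;          // DeepBeliefNetwork
using Accord.Statistics;              // RandomGroups / Subgroups helpers
using AForge.Neuro.Learning;          // BackPropagationLearning
using NUnit.Framework;                // Assert (assumed test framework)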
// The cached network (n), its storage path (p), and the latest epoch
// error (e) are class fields used by this method.
public void Train(double[][] i, double[][] o = null, int outputLength = 10, int hiddenLayer = -1)
{
    if (n == null)
    {
        if (File.Exists(p))
        {
            // Reuse a previously saved network if one exists on disk.
            n = DeepBeliefNetwork.Load(p);
        }
        else
        {
            // Infer the output size from the labels when they are available.
            outputLength = (o == null) ? outputLength : o[0].Length;

            // Choose the number of hidden layers unless the caller fixed it.
            hiddenLayer = (hiddenLayer == -1)
                ? (int)Math.Log(i[0].Length, outputLength)
                : hiddenLayer;

            // Each hidden layer has as many neurons as the input vector.
            List<int> layers = new List<int>();
            for (int j = 0; j < hiddenLayer; j++)
                layers.Add(i[0].Length);
            layers.Add(outputLength);

            n = new DeepBeliefNetwork(new BernoulliFunction(), i[0].Length, layers.ToArray());
            new GaussianWeights(n).Randomize();
        }
    }

    dynamic t;
    if (o == null)
    {
        // Unsupervised pre-training with contrastive divergence.
        t = new DeepBeliefNetworkLearning(n)
        {
            Algorithm = (h, v, j) => new ContrastiveDivergenceLearning(h, v),
            LayerIndex = n.Machines.Count - 1,
        };

        // Trains indefinitely; the latest epoch error is published through e.
        while (true)
            e = t.RunEpoch(t.GetLayerInput(i));
    }
    else
    {
        // Supervised fine-tuning with resilient backpropagation.
        t = new DeepNeuralNetworkLearning(n)
        {
            Algorithm = (ann, j) => new ParallelResilientBackpropagationLearning(ann),
            LayerIndex = n.Machines.Count - 1,
        };

        // Trains indefinitely; the latest epoch error is published through e.
        while (true)
            e = t.RunEpoch(t.GetLayerInput(i), o);
    }
}
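// Train above reads three members that are not declared in the snippet: the
// cached network n, its storage path p, and the running epoch error e. A
// minimal host sketch follows; only n, p, and e come from the original code,
// while the class name, path value, property, and helper are illustrative.

public class DbnTrainer
{
    private DeepBeliefNetwork n;       // lazily created or loaded network
    private string p = "network.bin";  // where a saved network would live
    private double e;                  // most recent epoch error

    public double LastError { get { return e; } }

    // ... Train(...) as defined above would be a member of this class ...
}

// Because both branches of Train loop forever, a caller would typically run
// it on a background thread and watch LastError from the outside:
public static void StartTraining(DbnTrainer trainer, double[][] trainingInputs)
{
    var worker = new System.Threading.Thread(() => trainer.Train(trainingInputs));
    worker.IsBackground = true;   // don't keep the process alive just to train
    worker.Start();
}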
private static DeepBeliefNetwork createNetwork(double[][] inputs)
{
    DeepBeliefNetwork network = new DeepBeliefNetwork(6, 2, 1);

    network.Machines[0].Hidden.Neurons[0].Weights[0] = 0.00461421;
    network.Machines[0].Hidden.Neurons[0].Weights[1] = 0.04337112;
    network.Machines[0].Hidden.Neurons[0].Weights[2] = -0.10839599;
    network.Machines[0].Hidden.Neurons[0].Weights[3] = -0.06234004;
    network.Machines[0].Hidden.Neurons[0].Weights[4] = -0.03017057;
    network.Machines[0].Hidden.Neurons[0].Weights[5] = 0.09520391;
    network.Machines[0].Hidden.Neurons[0].Threshold = 0;

    network.Machines[0].Hidden.Neurons[1].Weights[0] = 0.08263872;
    network.Machines[0].Hidden.Neurons[1].Weights[1] = -0.118437;
    network.Machines[0].Hidden.Neurons[1].Weights[2] = -0.21710971;
    network.Machines[0].Hidden.Neurons[1].Weights[3] = 0.02332903;
    network.Machines[0].Hidden.Neurons[1].Weights[4] = 0.00953116;
    network.Machines[0].Hidden.Neurons[1].Weights[5] = 0.09870652;
    network.Machines[0].Hidden.Neurons[1].Threshold = 0;

    network.Machines[0].Visible.Neurons[0].Threshold = 0;
    network.Machines[0].Visible.Neurons[1].Threshold = 0;
    network.Machines[0].Visible.Neurons[2].Threshold = 0;
    network.Machines[0].Visible.Neurons[3].Threshold = 0;
    network.Machines[0].Visible.Neurons[4].Threshold = 0;
    network.Machines[0].Visible.Neurons[5].Threshold = 0;

    network.UpdateVisibleWeights();

    DeepBeliefNetworkLearning target = new DeepBeliefNetworkLearning(network)
    {
        Algorithm = (h, v, i) => new ContrastiveDivergenceLearning(h, v)
    };

    // Train each machine in the stack with contrastive divergence.
    for (int layer = 0; layer < 2; layer++)
    {
        target.LayerIndex = layer;
        double[][] layerInputs = target.GetLayerInput(inputs);

        int iterations = 5000;
        double[] errors = new double[iterations];
        for (int i = 0; i < iterations; i++)
            errors[i] = target.RunEpoch(layerInputs);
    }

    return network;
}
static void Main(string[] args)
{
    double[][] inputs;
    double[][] outputs;
    double[][] testInputs;
    double[][] testOutputs;

    // Load the ASCII digits dataset.
    inputs = DataManager.Load(@"../../../data/data.txt", out outputs);

    // The first 500 data rows will be for training. The rest will be for testing.
    testInputs = inputs.Skip(500).ToArray();
    testOutputs = outputs.Skip(500).ToArray();
    inputs = inputs.Take(500).ToArray();
    outputs = outputs.Take(500).ToArray();

    // Set up the deep belief network and initialize with random weights.
    DeepBeliefNetwork network = new DeepBeliefNetwork(inputs.First().Length, 10, 10);
    new GaussianWeights(network, 0.1).Randomize();
    network.UpdateVisibleWeights();

    // Set up the learning algorithm.
    DeepBeliefNetworkLearning teacher = new DeepBeliefNetworkLearning(network)
    {
        Algorithm = (h, v, i) => new ContrastiveDivergenceLearning(h, v)
        {
            LearningRate = 0.1,
            Momentum = 0.5,
            Decay = 0.001,
        }
    };

    // Set up batches of input for learning.
    int batchCount = Math.Max(1, inputs.Length / 100);

    // Create mini-batches to speed learning.
    int[] groups = Accord.Statistics.Tools.RandomGroups(inputs.Length, batchCount);
    double[][][] batches = inputs.Subgroups(groups);

    // Learning data for the specified layer.
    double[][][] layerData;

    // Unsupervised learning on each hidden layer, except for the output layer.
    for (int layerIndex = 0; layerIndex < network.Machines.Count - 1; layerIndex++)
    {
        teacher.LayerIndex = layerIndex;
        layerData = teacher.GetLayerInput(batches);

        for (int i = 0; i < 200; i++)
        {
            double error = teacher.RunEpoch(layerData) / inputs.Length;
            if (i % 10 == 0)
                Console.WriteLine(i + ", Error = " + error);
        }
    }

    // Supervised learning on the entire network, to provide output classification.
    var teacher2 = new BackPropagationLearning(network)
    {
        LearningRate = 0.1,
        Momentum = 0.5
    };

    // Run supervised learning.
    for (int i = 0; i < 500; i++)
    {
        double error = teacher2.RunEpoch(inputs, outputs) / inputs.Length;
        if (i % 10 == 0)
            Console.WriteLine(i + ", Error = " + error);
    }

    // Test the resulting accuracy on the held-out rows.
    int correct = 0;
    for (int i = 0; i < testInputs.Length; i++)
    {
        double[] outputValues = network.Compute(testInputs[i]);
        if (DataManager.FormatOutputResult(outputValues) == DataManager.FormatOutputResult(testOutputs[i]))
            correct++;
    }

    Console.WriteLine("Correct " + correct + "/" + testInputs.Length + ", "
        + Math.Round((double)correct / testInputs.Length * 100, 2) + "%");
    Console.Write("Press any key to quit ..");
    Console.ReadKey();
}
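// DataManager.FormatOutputResult is not shown in this listing. Assuming the
// digit labels are one-hot encoded, a plausible stand-in (an illustrative
// sketch, not the project's actual helper) simply reports the index of the
// strongest activation, so two vectors compare equal when they agree on the
// winning class:

// Hypothetical stand-in for DataManager.FormatOutputResult.
public static int FormatOutputResult(double[] output)
{
    int best = 0;
    for (int i = 1; i < output.Length; i++)
        if (output[i] > output[best])
            best = i;
    return best;
}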
private void learnLayerUnsupervised()
{
    if (!Main.CanGenerate)
        return;

    Dispatcher dispatcher = Dispatcher.CurrentDispatcher;

    new Task(() =>
    {
        DeepBeliefNetworkLearning teacher = new DeepBeliefNetworkLearning(Main.Network)
        {
            Algorithm = (h, v, i) => new ContrastiveDivergenceLearning(h, v)
            {
                LearningRate = LearningRate,
                Momentum = 0.5,
                Decay = WeightDecay,
            },

            LayerIndex = SelectedLayerIndex - 1,
        };

        double[][] inputs;
        Main.Database.Training.GetInstances(out inputs);

        int batchCount = Math.Max(1, inputs.Length / BatchSize);

        // Create mini-batches to speed learning.
        int[] groups = Accord.Statistics.Tools
            .RandomGroups(inputs.Length, batchCount);
        double[][][] batches = inputs.Subgroups(groups);

        // Gather learning data for the layer.
        double[][][] layerData = teacher.GetLayerInput(batches);

        var cd = teacher.GetLayerAlgorithm(teacher.LayerIndex)
            as ContrastiveDivergenceLearning;

        // Start running the learning procedure.
        for (int i = 0; i < Epochs && !shouldStop; i++)
        {
            double error = teacher.RunEpoch(layerData) / inputs.Length;

            dispatcher.BeginInvoke((Action<int, double>)updateError,
                DispatcherPriority.ContextIdle, i + 1, error);

            // Raise the momentum once the first epochs have stabilized.
            if (i == 10)
                cd.Momentum = Momentum;
        }

        IsLearning = false;
    }).Start();
}