public void ExampleTest1()
{
    // Fix the random seed so the test is reproducible.
    Accord.Math.Tools.SetupGenerator(0);

    // Training data: the XOR truth table.
    double[][] xorInputs =
    {
        new double[] { 0, 0 }, // 0 xor 0
        new double[] { 0, 1 }, // 0 xor 1
        new double[] { 1, 0 }, // 1 xor 0
        new double[] { 1, 1 }, // 1 xor 1
    };

    // Expected XOR result for each input row.
    double[][] xorOutputs =
    {
        new double[] { 0 }, // 0 xor 0 = 0
        new double[] { 1 }, // 0 xor 1 = 1
        new double[] { 1 }, // 1 xor 0 = 1
        new double[] { 0 }, // 1 xor 1 = 0
    };

    // Deep belief network: 2 inputs, 3 hidden neurons, 1 output.
    var network = new DeepBeliefNetwork(2, 3, 1);

    // Draw initial weights from a Gaussian and push them to the visible layer.
    new GaussianWeights(network, 0.1).Randomize();
    network.UpdateVisibleWeights();

    // Layer-wise unsupervised pre-training via contrastive divergence.
    var preTrainer = new DeepBeliefNetworkLearning(network)
    {
        Algorithm = (h, v, i) => new ContrastiveDivergenceLearning(h, v)
        {
            LearningRate = 0.1,
            Momentum = 0.5,
            Decay = 0.001,
        }
    };

    // Pre-train every hidden layer except the output layer.
    for (int layer = 0; layer < network.Layers.Length - 1; layer++)
    {
        preTrainer.LayerIndex = layer;

        // Propagate the inputs up to the layer being trained.
        var layerInput = preTrainer.GetLayerInput(xorInputs);

        for (int epoch = 0; epoch < 5000; epoch++)
            preTrainer.RunEpoch(layerInput);
    }

    // Supervised fine-tuning of the whole network with backpropagation.
    var fineTuner = new BackPropagationLearning(network)
    {
        LearningRate = 0.1,
        Momentum = 0.5
    };

    for (int epoch = 0; epoch < 5000; epoch++)
        fineTuner.RunEpoch(xorInputs, xorOutputs);

    // All four XOR cases must be classified correctly (threshold at 0.5).
    int correct = 0;
    for (int i = 0; i < xorInputs.Length; i++)
    {
        double[] prediction = network.Compute(xorInputs[i]);
        double predictedClass = prediction.First() >= 0.5 ? 1 : 0;
        if (predictedClass == xorOutputs[i].First())
        {
            correct++;
        }
    }

    Assert.AreEqual(4, correct);
}
public override void Initialize()
{
    // The +1 on both the input size and the output size accounts for the
    // homogeneous-coordinate (bias) component.
    var layerSizes = new int[] { _MiddleCount, FrameOut.Length + 1 };
    _Network = new DeepBeliefNetwork(Length + 1, layerSizes);

    // Gaussian weight initialization, then sync the visible layer.
    new GaussianWeights(_Network).Randomize();
    _Network.UpdateVisibleWeights();

    InitializeTeacher();
}
/// <summary>
///   Trains the network <c>n</c> on inputs <paramref name="i"/>. When
///   <paramref name="o"/> is null, runs unsupervised contrastive-divergence
///   pre-training of the top layer; otherwise runs supervised resilient
///   backpropagation against the targets.
/// </summary>
///
/// <param name="i">Training input vectors.</param>
/// <param name="o">Optional target vectors; null selects unsupervised training.</param>
/// <param name="outputLength">Output layer size used when no targets are given.</param>
/// <param name="hiddenLayer">Number of hidden layers; -1 derives it from the input size.</param>
/// <param name="maxEpochs">Upper bound on training epochs (the original looped forever).</param>
/// <param name="tolerance">Stop early when the epoch error change falls below this value.</param>
///
public void Train(double[][] i, double[][] o = null, int outputLength = 10, int hiddenLayer = -1,
    int maxEpochs = 5000, double tolerance = 1e-6)
{
    // Lazily load a persisted network, or build a fresh one on first use.
    if (n == null)
    {
        if (File.Exists(p))
        {
            n = DeepBeliefNetwork.Load(p);
        }
        else
        {
            outputLength = (o == null) ? outputLength : o[0].Length;
            hiddenLayer = (hiddenLayer == -1)
                ? (int)Math.Log(i[0].Length, outputLength)
                : hiddenLayer;

            // Every hidden layer is as wide as the input; the last layer narrows
            // to the requested output length.
            List<int> layers = new List<int>();
            for (int j = 0; j < hiddenLayer; j++)
                layers.Add(i[0].Length);
            layers.Add(outputLength);

            n = new DeepBeliefNetwork(new BernoulliFunction(), i[0].Length, layers.ToArray());
            new GaussianWeights(n).Randomize();
        }
    }

    // BUG FIX: the original 'while (true)' loops never terminated, so Train()
    // never returned. Both branches now stop after maxEpochs epochs or when
    // the epoch error converges. Layer input is computed once: only the layer
    // at LayerIndex is updated, so the propagated input is loop-invariant.
    if (o == null)
    {
        var t = new DeepBeliefNetworkLearning(n)
        {
            Algorithm = (h, v, j) => new ContrastiveDivergenceLearning(h, v),
            LayerIndex = n.Machines.Count - 1,
        };
        double[][] layerInput = t.GetLayerInput(i);
        double previous = double.MaxValue;
        for (int epoch = 0; epoch < maxEpochs; epoch++)
        {
            e = t.RunEpoch(layerInput);
            if (Math.Abs(previous - e) < tolerance)
                break;
            previous = e;
        }
    }
    else
    {
        var t = new DeepNeuralNetworkLearning(n)
        {
            Algorithm = (ann, j) => new ParallelResilientBackpropagationLearning(ann),
            LayerIndex = n.Machines.Count - 1,
        };
        double[][] layerInput = t.GetLayerInput(i);
        double previous = double.MaxValue;
        for (int epoch = 0; epoch < maxEpochs; epoch++)
        {
            e = t.RunEpoch(layerInput, o);
            if (Math.Abs(previous - e) < tolerance)
                break;
            previous = e;
        }
    }
}
/// <summary>
///   Loads the black/white word lists, the replacement table, and the deep
///   learning network from application settings.
/// </summary>
public TwitterCheck()
{
    var settings = ConfigurationManager.AppSettings;

    var blackWords = new List<string>();
    var whiteWords = new List<string>();
    for (int i = 1; i <= SettingMax; i++)
    {
        // BUG FIX: the original code did 'if (black == null) continue;',
        // which also skipped loading the white-words entry at the same
        // index whenever the black entry was missing. Each list is now
        // loaded independently.
        string black = settings["twitter.blackwords." + i];
        if (black != null)
        {
            blackWords.AddRange(black.Split(','));
        }
        string white = settings["twitter.whitewords." + i];
        if (white != null)
        {
            whiteWords.AddRange(white.Split(','));
        }
    }
    BlackWords = blackWords.AsReadOnly();
    WhiteWords = whiteWords.AsReadOnly();
    Log.Trace.TraceEvent(TraceEventType.Information, 0, "{0} black words loaded", BlackWords.Count);
    Log.Trace.TraceEvent(TraceEventType.Information, 0, "{0} white words loaded", WhiteWords.Count);

    // Replacement table: comma-separated "from=to" pairs per setting entry.
    var replaceList = new List<KeyValuePair<string, string>>();
    for (int i = 1; i <= SettingMax; i++)
    {
        string str = settings["twitter.replace." + i];
        if (str == null) continue;
        foreach (var pair in str.Split(','))
        {
            string[] kv = pair.Split('=');
            // Guard against malformed entries without '=' (previously an
            // IndexOutOfRangeException); extra '=' beyond the first two
            // fields is ignored, matching the original behavior.
            if (kv.Length >= 2)
            {
                replaceList.Add(new KeyValuePair<string, string>(kv[0], kv[1]));
            }
            else
            {
                Log.Trace.TraceEvent(TraceEventType.Warning, 0,
                    "Malformed replace entry ignored: {0}", pair);
            }
        }
    }
    ReplaceList = replaceList.AsReadOnly();
    Log.Trace.TraceEvent(TraceEventType.Information, 0, "{0} replace entries loaded", ReplaceList.Count);

    // The network is optional: log and continue when loading fails.
    try
    {
        dlNetwork = DollsLib.Learning.DataManager.LoadDeepLearning(
            SettingManager.Settings.Twitter.DlNetTrainError);
    }
    catch (Exception)
    {
        // Typo fixed: "DlNwtwork" -> "DlNetwork".
        Log.Trace.TraceEvent(TraceEventType.Warning, 0,
            "DlNetwork {0} load failed",
            SettingManager.Settings.Twitter.DlNetTrainError);
    }
}
/// <summary>
///   Creates a Mixed-Bernoulli network: the first visible layer uses one
///   stochastic activation function while every other layer uses another.
/// </summary>
///
/// <param name="visible">The <see cref="IStochasticFunction"/> to be used in the first visible layer.</param>
/// <param name="hidden">The <see cref="IStochasticFunction"/> to be used in all other layers.</param>
///
/// <param name="inputsCount">The number of inputs for the network.</param>
/// <param name="hiddenNeurons">The number of hidden neurons in each layer.</param>
///
public static DeepBeliefNetwork CreateMixedNetwork(IStochasticFunction visible,
    IStochasticFunction hidden, int inputsCount, params int[] hiddenNeurons)
{
    // Build the network with the hidden function everywhere, then overwrite
    // the activation function of the first machine's visible neurons.
    DeepBeliefNetwork network = new DeepBeliefNetwork(hidden, inputsCount, hiddenNeurons);

    var firstVisible = network.machines[0].Visible;
    for (int index = 0; index < firstVisible.Neurons.Length; index++)
    {
        ((StochasticNeuron)firstVisible.Neurons[index]).ActivationFunction = visible;
    }

    return network;
}
/// <summary>
///   Creates a Gaussian-Bernoulli network: Bernoulli layers everywhere,
///   with Gaussian activations on the first visible layer.
/// </summary>
///
/// <param name="inputsCount">The number of inputs for the network.</param>
/// <param name="hiddenNeurons">The number of hidden neurons in each layer.</param>
///
public static DeepBeliefNetwork CreateGaussianBernoulli(int inputsCount, params int[] hiddenNeurons)
{
    DeepBeliefNetwork network = new DeepBeliefNetwork(inputsCount, hiddenNeurons);

    // A single shared Gaussian function instance replaces the activation of
    // every neuron in the first machine's visible layer.
    GaussianFunction gaussian = new GaussianFunction();
    var firstVisible = network.machines[0].Visible;
    for (int index = 0; index < firstVisible.Neurons.Length; index++)
    {
        ((StochasticNeuron)firstVisible.Neurons[index]).ActivationFunction = gaussian;
    }

    return network;
}
public MainViewModel()
{
    // Create settings for the Optdigits dataset: a Bernoulli deep belief
    // network with 1024 inputs (32x32 image), 50 hidden neurons and
    // 10 outputs (digit classes).
    Network = new DeepBeliefNetwork(new BernoulliFunction(), 1024, 50, 10);
    Database = new Optdigits() { IsNormalized = false };

    // Gaussian weight initialization, then sync the visible layer weights.
    new GaussianWeights(Network).Randomize();
    Network.UpdateVisibleWeights();

    // Child view-models all receive this view-model as their shared context.
    Learn = new LearnViewModel(this);
    Use = new UseViewModel(this);
    Dream = new DreamViewModel(this);
    Discover = new DiscoverViewModel(this);

    // Default neuron count offered when the user adds a new layer.
    NewLayerNeurons = 10;
}
/// <summary>
///   Creates a Mixed-Bernoulli network in which only the first visible layer
///   uses the <paramref name="visible"/> activation function.
/// </summary>
///
/// <param name="visible">The <see cref="IStochasticFunction"/> to be used in the first visible layer.</param>
/// <param name="hidden">The <see cref="IStochasticFunction"/> to be used in all other layers.</param>
///
/// <param name="inputsCount">The number of inputs for the network.</param>
/// <param name="hiddenNeurons">The number of hidden neurons in each layer.</param>
///
public static DeepBeliefNetwork CreateMixedNetwork(IStochasticFunction visible,
    IStochasticFunction hidden, int inputsCount, params int[] hiddenNeurons)
{
    // Start from a uniform network, then retrofit the visible function onto
    // the bottom machine's visible neurons.
    var network = new DeepBeliefNetwork(hidden, inputsCount, hiddenNeurons);

    var bottomLayer = network.machines[0].Visible;
    for (int n = 0; n < bottomLayer.Neurons.Length; n++)
    {
        var neuron = (StochasticNeuron)bottomLayer.Neurons[n];
        neuron.ActivationFunction = visible;
    }

    return network;
}
/// <summary>
///   Creates a Gaussian-Bernoulli network by swapping the first visible
///   layer's activations for a shared <see cref="GaussianFunction"/>.
/// </summary>
///
/// <param name="inputsCount">The number of inputs for the network.</param>
/// <param name="hiddenNeurons">The number of hidden neurons in each layer.</param>
///
public static DeepBeliefNetwork CreateGaussianBernoulli(int inputsCount, params int[] hiddenNeurons)
{
    var network = new DeepBeliefNetwork(inputsCount, hiddenNeurons);

    // One Gaussian instance is shared by every visible neuron of the
    // bottom machine.
    var gaussian = new GaussianFunction();
    var bottomLayer = network.machines[0].Visible;
    for (int n = 0; n < bottomLayer.Neurons.Length; n++)
    {
        var neuron = (StochasticNeuron)bottomLayer.Neurons[n];
        neuron.ActivationFunction = gaussian;
    }

    return network;
}
/// <summary>
///   Creates a new <see cref="DeepNeuralNetworkLearning"/> algorithm.
/// </summary>
///
/// <param name="network">The network to be trained.</param>
///
public DeepNeuralNetworkLearning(DeepBeliefNetwork network)
{
    // Only stores the reference; training configuration happens elsewhere.
    this.network = network;
}
private static DeepBeliefNetwork createNetwork(double[][] inputs)
{
    // Build a 6-2-1 network with fixed initial weights so the result
    // is reproducible across runs.
    DeepBeliefNetwork network = new DeepBeliefNetwork(6, 2, 1);

    // One row of weights per hidden neuron of the first machine.
    double[][] hiddenWeights =
    {
        new double[] { 0.00461421, 0.04337112, -0.10839599, -0.06234004, -0.03017057, 0.09520391 },
        new double[] { 0.08263872, -0.118437, -0.21710971, 0.02332903, 0.00953116, 0.09870652 },
    };

    var machine = network.Machines[0];
    for (int h = 0; h < hiddenWeights.Length; h++)
    {
        for (int w = 0; w < hiddenWeights[h].Length; w++)
            machine.Hidden.Neurons[h].Weights[w] = hiddenWeights[h][w];
        machine.Hidden.Neurons[h].Threshold = 0;
    }

    // All visible thresholds start at zero.
    for (int v = 0; v < machine.Visible.Neurons.Length; v++)
        machine.Visible.Neurons[v].Threshold = 0;

    network.UpdateVisibleWeights();

    // Pre-train both layers with contrastive divergence.
    DeepBeliefNetworkLearning target = new DeepBeliefNetworkLearning(network)
    {
        Algorithm = (h, v, i) => new ContrastiveDivergenceLearning(h, v)
    };

    for (int layer = 0; layer < 2; layer++)
    {
        target.LayerIndex = layer;
        double[][] layerInputs = target.GetLayerInput(inputs);

        const int iterations = 5000;
        double[] errors = new double[iterations];
        for (int i = 0; i < iterations; i++)
            errors[i] = target.RunEpoch(layerInputs);
    }

    return network;
}
public void ConstructorTest()
{
    // A (6, 2, 1) topology should produce two stacked machines:
    // 6 visible -> 2 hidden, then 2 visible -> 1 hidden.
    DeepBeliefNetwork network = new DeepBeliefNetwork(6, 2, 1);

    Assert.AreEqual(2, network.Machines.Count);
    Assert.AreEqual(6, network.InputsCount);
    Assert.AreEqual(1, network.OutputCount);

    // First machine: 6 -> 2.
    Assert.AreEqual(6, network.Machines[0].Visible.Neurons.Length);
    Assert.AreEqual(2, network.Machines[0].Hidden.Neurons.Length);

    // Second machine: 2 -> 1.
    Assert.AreEqual(2, network.Machines[1].Visible.Neurons.Length);
    Assert.AreEqual(1, network.Machines[1].Hidden.Neurons.Length);
}
public void PushPopTest()
{
    // Start with a (6, 2, 9) topology: two machines, 6 -> 2 and 2 -> 9.
    DeepBeliefNetwork network = new DeepBeliefNetwork(6, 2, 9);

    Assert.AreEqual(2, network.Machines.Count);
    Assert.AreEqual(6, network.InputsCount);
    Assert.AreEqual(9, network.OutputCount);
    Assert.AreEqual(2, network.Machines[0].Hidden.Neurons.Length);
    Assert.AreEqual(9, network.Machines[1].Hidden.Neurons.Length);
    Assert.AreEqual(6, network.Machines[0].Visible.Neurons.Length);
    Assert.AreEqual(2, network.Machines[1].Visible.Neurons.Length);

    // Pop removes the top machine; the output becomes the remaining
    // machine's hidden layer (6 -> 2).
    network.Pop();

    Assert.AreEqual(1, network.Machines.Count);
    Assert.AreEqual(6, network.InputsCount);
    Assert.AreEqual(2, network.OutputCount);
    Assert.AreEqual(2, network.Machines[0].Hidden.Neurons.Length);
    Assert.AreEqual(6, network.Machines[0].Visible.Neurons.Length);

    // Push stacks new machines on top: 6 -> 2 -> 4 -> 10.
    network.Push(4);
    network.Push(10);

    Assert.AreEqual(3, network.Machines.Count);
    Assert.AreEqual(6, network.InputsCount);
    Assert.AreEqual(10, network.OutputCount);
    Assert.AreEqual(2, network.Machines[0].Hidden.Neurons.Length);
    Assert.AreEqual(4, network.Machines[1].Hidden.Neurons.Length);
    Assert.AreEqual(10, network.Machines[2].Hidden.Neurons.Length);
    Assert.AreEqual(6, network.Machines[0].Visible.Neurons.Length);
    Assert.AreEqual(2, network.Machines[1].Visible.Neurons.Length);
    Assert.AreEqual(4, network.Machines[2].Visible.Neurons.Length);

    // A final Pop restores a 6 -> 2 -> 4 topology.
    network.Pop();

    Assert.AreEqual(2, network.Machines.Count);
    Assert.AreEqual(6, network.InputsCount);
    Assert.AreEqual(4, network.OutputCount);
    Assert.AreEqual(2, network.Machines[0].Hidden.Neurons.Length);
    Assert.AreEqual(4, network.Machines[1].Hidden.Neurons.Length);
    Assert.AreEqual(6, network.Machines[0].Visible.Neurons.Length);
    Assert.AreEqual(2, network.Machines[1].Visible.Neurons.Length);
}
public override bool Load(string path)
{
    // Only attempt deserialization when the file is actually present;
    // the result doubles as the return value.
    bool exists = File.Exists(path);
    if (exists)
    {
        _Network = DeepBeliefNetwork.Load(path);
        InitializeTeacher();
    }
    return exists;
}
/// <summary>
///   Trains a deep belief network on the ascii digits dataset
///   (unsupervised pre-training followed by supervised fine-tuning)
///   and reports accuracy on a held-out test split.
/// </summary>
static void Main(string[] args)
{
    double[][] inputs;
    double[][] outputs;
    double[][] testInputs;
    double[][] testOutputs;

    // Load ascii digits dataset.
    inputs = DataManager.Load(@"../../../data/data.txt", out outputs);

    // The first 500 data rows will be for training. The rest will be for testing.
    testInputs = inputs.Skip(500).ToArray();
    testOutputs = outputs.Skip(500).ToArray();
    inputs = inputs.Take(500).ToArray();
    outputs = outputs.Take(500).ToArray();

    // Setup the deep belief network and initialize with random weights.
    DeepBeliefNetwork network = new DeepBeliefNetwork(inputs.First().Length, 10, 10);
    new GaussianWeights(network, 0.1).Randomize();
    network.UpdateVisibleWeights();

    // Setup the layer-wise unsupervised learning algorithm.
    DeepBeliefNetworkLearning teacher = new DeepBeliefNetworkLearning(network)
    {
        Algorithm = (h, v, i) => new ContrastiveDivergenceLearning(h, v)
        {
            LearningRate = 0.1,
            Momentum = 0.5,
            Decay = 0.001,
        }
    };

    // Create mini-batches (about 100 rows each) to speed learning.
    int batchCount = Math.Max(1, inputs.Length / 100);
    int[] groups = Accord.Statistics.Tools.RandomGroups(inputs.Length, batchCount);
    double[][][] batches = inputs.Subgroups(groups);

    // Learning data for the specified layer.
    double[][][] layerData;

    // Unsupervised learning on each hidden layer, except for the output layer.
    for (int layerIndex = 0; layerIndex < network.Machines.Count - 1; layerIndex++)
    {
        teacher.LayerIndex = layerIndex;
        layerData = teacher.GetLayerInput(batches);

        for (int i = 0; i < 200; i++)
        {
            double error = teacher.RunEpoch(layerData) / inputs.Length;
            if (i % 10 == 0)
            {
                Console.WriteLine(i + ", Error = " + error);
            }
        }
    }

    // Supervised learning on entire network, to provide output classification.
    var teacher2 = new BackPropagationLearning(network)
    {
        LearningRate = 0.1,
        Momentum = 0.5
    };

    // Run supervised learning.
    for (int i = 0; i < 500; i++)
    {
        double error = teacher2.RunEpoch(inputs, outputs) / inputs.Length;
        if (i % 10 == 0)
        {
            Console.WriteLine(i + ", Error = " + error);
        }
    }

    // Test the resulting accuracy on the held-out rows.
    // BUG FIX: the loop previously ran over inputs.Length (the 500 training
    // rows) while indexing testInputs/testOutputs — throwing when the test
    // split has fewer than 500 rows — and divided by the wrong denominator.
    int correct = 0;
    for (int i = 0; i < testInputs.Length; i++)
    {
        double[] outputValues = network.Compute(testInputs[i]);
        if (DataManager.FormatOutputResult(outputValues) == DataManager.FormatOutputResult(testOutputs[i]))
        {
            correct++;
        }
    }

    Console.WriteLine("Correct " + correct + "/" + testInputs.Length + ", "
        + Math.Round(((double)correct / (double)testInputs.Length * 100), 2) + "%");
    Console.Write("Press any key to quit ..");
    Console.ReadKey();
}