public async Task Organizing()
{
    // Build a 3-input / 1-output Kohonen map whose topology is refined by the
    // organizing algorithm (factor 0.777) during unsupervised training.
    var topology = new NetworkConfiguration(3, 1);
    var net = new KohonenNetwork<Logistic>(topology);
    var trainingOptions = new LearningConfiguration
    {
        ThetaFactorPerEpoch = 0.95,
        OrganizingAlgorithm = new Organizing<Logistic>(net, 0.777)
    };
    var trainer = new UnsupervisedLearning(net, trainingOptions);

    await trainer.LearnAsync(_getInputs(), 25);

    // Feed one control vector into the trained map and read back its winner index.
    async Task<int> ClassifyAsync(int index)
    {
        net.Input(_control[index]);
        return await net.GetOutputIndexAsync();
    }

    var first = await ClassifyAsync(0);
    var second = await ClassifyAsync(1);
    var third = await ClassifyAsync(2);

    // The first control sample should land in a different cluster than the
    // last two, which should share one.
    Assert.NotEqual(first, second);
    Assert.Equal(second, third);
}
public void ANN()
{
    // Visible layer width of the DBN and number of identical training rows.
    const int InputLength = 50;
    const int SampleCount = 1000000;

    // Gaussian-Bernoulli deep belief network: 50 visible units feeding
    // hidden layers of 3, 5, 5, 10 and 3 neurons.
    var dbn = DeepBeliefNetwork.CreateGaussianBernoulli(InputLength, new int[] { 3, 5, 5, 10, 3 });
    var dblNode = new UnsupervisedLearning(new DeepBeliefNetworkLearning(dbn)
    {
        Algorithm = (h, v, i) => new ContrastiveDivergenceLearning(h, v)
        {
            LearningRate = 0.1,
            Momentum = 0.5,
            Decay = 0.001
        }
    });
    var cout = new ConsoleOut("ANNOut.txt");
    dbn.UpdateVisibleWeights();
    dblNode.Process.LinkTo(cout.Output);

    // The sine row is loop-invariant: compute it once and clone it per
    // sample instead of recomputing 50 sines one million times.  Cloning
    // keeps the original behavior of a distinct array per row.
    var template = new double[InputLength];
    for (int j = 0; j < InputLength; j++)
    {
        template[j] = Math.Sin((double)j);
    }

    var data = new double[SampleCount][];
    for (int i = 0; i < SampleCount; i++)
    {
        data[i] = (double[])template.Clone();
    }

    dblNode.Process.Post(new MachineLearningData<double[][], double>(data));

    // Crude wait so the dataflow pipeline can drain before the method
    // returns.  NOTE(review): awaiting the pipeline's Completion task would
    // be more reliable than a fixed sleep — confirm the node exposes one.
    System.Threading.Thread.Sleep(1000);
}
public void Learning()
{
    // 3-input / 5-output Kohonen map trained synchronously for the
    // configured default number of repeats.
    var topology = new NetworkConfiguration(3, 5);
    var options = new LearningConfiguration
    {
        ThetaFactorPerEpoch = 0.95,
        DefaultRepeatsNumber = 25
    };
    var net = new KohonenNetwork<Logistic>(topology);
    var trainer = new UnsupervisedLearning(net, options);
    trainer.Learn(_getInputs());

    // Feed one control vector into the trained map and read back its winner index.
    int Classify(int index)
    {
        net.Input(_control[index]);
        return net.GetOutputIndex();
    }

    var first = Classify(0);
    var second = Classify(1);
    var third = Classify(2);

    // The first control sample should land in a different cluster than the
    // last two, which should share one.
    Assert.NotEqual(first, second);
    Assert.Equal(second, third);
}
public void DBN()
{
    // Build a Gaussian-Bernoulli deep belief network with 1024 visible units
    // and 10 hidden units, randomize its weights, and sync visible weights.
    var dbn = DeepBeliefNetwork.CreateGaussianBernoulli(1024, 10);
    new GaussianWeights(dbn).Randomize();
    dbn.UpdateVisibleWeights();
    var dblNode = new UnsupervisedLearning(new DeepBeliefNetworkLearning(dbn));

    // NOTE(review): dataNorm, fromFile and dataFile are created but never
    // wired into the pipeline below — the chain sketched in the comment
    // further down is unfinished.  dataFile is also a machine-specific
    // absolute path, so this cannot run anywhere but the author's box.
    var dataNorm = new Normalizer();
    var fromFile = new TextReader();
    var dataFile = new System.IO.FileInfo(@"C:\Users\Dan\Documents\Visual Studio 2015\Projects\AccordNETSamples\framework-master\Samples\Neuro\Deep Learning\Resources\optdigits-tra.txt");

    // POST DataFile -> TextReader -> ? -> Normalizer -> UnsupervisedLearning
    // Dataflow block intended to parse a whole optdigits text payload into
    // per-epoch sample batches pushed through an observable.
    var dataProc = new TransformBlock<Instance<string>, IObservable<List<Instance<double>>>>(inst =>
    {
        var reader = new System.IO.StringReader(inst.Data);
        var samples = new Subject<List<Instance<double>>>();
        var epochSamples = new List<MachineLearningData<double[], double>>();
        var buffer = new char[(32 + 1) * 32]; // 32 chars + \n
        var count = 0;
        while (true)
        {
            // Read one fixed-size bitmap block, then its label on the next
            // line; stop when either comes up short (end of input).
            var read = reader.ReadBlock(buffer, 0, buffer.Length);
            var label = reader.ReadLine();
            if (read < buffer.Length || label == null)
                break;
            // NOTE(review): the bitmap characters in `buffer` are never
            // converted into features, and currMLS is discarded each
            // iteration — epochSamples stays empty and `samples` never
            // emits, so downstream consumers receive nothing.  The parsing
            // logic appears unfinished.
            var currMLS = new MachineLearningData<double[], double>();
            currMLS.Class = Convert.ToInt32(label);
            count++;
        }
        return samples.AsObservable();
    });
}