/// <summary>
/// Verifies that a network's optimizer state survives a GPU round-trip:
/// a clone trained on the CPU must end up with the same weights, gradients
/// and optimizer caches as the original trained on the GPU and transferred
/// back to the host. (NOTE: "Netwrok" typo is preserved — renaming would
/// change the public test name.)
/// </summary>
public void CanTransferWeightsInsideNetwrok()
{
    var gpuOptimizer = new RMSPropOptimizer<float>(1e-3f);
    var gpuNet = new LayeredNet<float>(1, 1,
        new GruLayer<float>(2, 3),
        new LinearLayer<float>(3, 2),
        new SoftMaxLayer<float>(2))
    {
        Optimizer = gpuOptimizer
    };

    // Clone before any training so both networks start from identical state.
    var cpuNet = (LayeredNet<float>)gpuNet.Clone();
    var cpuOptimizer = new RMSPropOptimizer<float>(1e-3f);
    cpuNet.Optimizer = cpuOptimizer;

    gpuNet.UseGpu();
    TrainXor(gpuNet);
    gpuNet.TransferStateToHost();

    TrainXor(cpuNet);

    var gpuWeights = gpuNet.Weights.ToList();
    var cpuWeights = cpuNet.Weights.ToList();
    for (int idx = 0; idx < gpuWeights.Count; idx++)
    {
        // Weight, gradient and all RMSProp caches must match within tolerance.
        gpuWeights[idx].Weight.ShouldMatrixEqualWithinError(cpuWeights[idx].Weight);
        gpuWeights[idx].Gradient.ShouldMatrixEqualWithinError(cpuWeights[idx].Gradient);
        gpuWeights[idx].Cache1.ShouldMatrixEqualWithinError(cpuWeights[idx].Cache1);
        gpuWeights[idx].Cache2.ShouldMatrixEqualWithinError(cpuWeights[idx].Cache2);
        gpuWeights[idx].CacheM.ShouldMatrixEqualWithinError(cpuWeights[idx].CacheM);
    }
}
/// <summary>
/// Trains a small sigmoid-affine + softmax classifier on the MNIST dataset,
/// downloading the dataset to a temp directory if needed.
/// </summary>
/// <param name="gui">When true, launches the Retia training GUI alongside the console runner.</param>
public void Mnist([DefaultValue(true)] bool gui)
{
    const int batchSize = 128;
    const int hSize = 20;

    MklProvider.TryUseMkl(true, ConsoleProgressWriter.Instance);

    string dataDir = Path.Combine(Path.GetTempPath(), "Retia_datasets", "MNIST");
    DownloadDataset(dataDir);

    Console.WriteLine("Loading training set");
    var trainSet = LoadTrainingSet(dataDir);
    trainSet.BatchSize = batchSize;

    var network = new LayeredNet<float>(batchSize, 1,
        new AffineLayer<float>(trainSet.InputSize, hSize, AffineActivation.Sigmoid),
        new LinearLayer<float>(hSize, trainSet.TargetSize),
        new SoftMaxLayer<float>(trainSet.TargetSize));
    var optimizer = new AdamOptimizer<float>();
    network.Optimizer = optimizer;

    var trainer = new OptimizingTrainer<float>(network, optimizer, trainSet,
        new OptimizingTrainerOptions(1)
        {
            ErrorFilterSize = 100,
            MaxEpoch = 1,
            ProgressWriter = ConsoleProgressWriter.Instance,
            ReportProgress = new EachIteration(100),
            ReportMesages = true
        },
        new OptimizingSession("MNIST"));

    // Fix: declare the GUI handle inside the branch — the original declared it
    // in the outer scope, leaving an unassigned, unused local when gui == false.
    if (gui)
    {
        var retiaGui = new RetiaGui();
        retiaGui.RunAsync(() => new TrainingWindow(new TypedTrainingModel<float>(trainer)));
    }

    var runner = ConsoleRunner.Create(trainer, network);
    runner.Run();
}
/// <summary>
/// Synchronously trains the given network on the XOR dataset for exactly one
/// sequence: the first SequenceTrained event cancels the training loop.
/// Uses the optimizer already attached to <paramref name="net"/>.
/// </summary>
/// <param name="net">Network to train; its Optimizer must be set.</param>
private void TrainXor(LayeredNet<float> net)
{
    var trainer = new OptimizingTrainer<float>(net, net.Optimizer, new XorDataset(true),
        new OptimizingTrainerOptions(1)
        {
            ErrorFilterSize = 0,
            ReportProgress = ActionSchedule.Disabled,
            ReportMesages = false
        },
        new OptimizingSession(false));

    // Fix: dispose the CancellationTokenSource (it is IDisposable).
    using var cts = new CancellationTokenSource();

    // Stop after the first trained sequence.
    trainer.SequenceTrained += s => { cts.Cancel(); };

    // Fix: GetAwaiter().GetResult() instead of Wait() so a training failure
    // surfaces as the original exception, not an AggregateException wrapper.
    trainer.Train(cts.Token).GetAwaiter().GetResult();
}
/// <summary>
/// Trains a tiny two-layer tanh network on the XOR problem with RMSProp,
/// showing progress in both the console and the Retia GUI. Training stops
/// once the raw error drops below 1e-7.
/// </summary>
public void Xor()
{
    MklProvider.TryUseMkl(true, ConsoleProgressWriter.Instance);

    var rmsProp = new RMSPropOptimizer<float>(1e-3f);
    var outputLayer = new AffineLayer<float>(3, 1, AffineActivation.Tanh)
    {
        ErrorFunction = new MeanSquareError<float>()
    };
    var xorNet = new LayeredNet<float>(1, 1,
        new AffineLayer<float>(2, 3, AffineActivation.Tanh),
        outputLayer)
    {
        Optimizer = rmsProp
    };

    var options = new OptimizingTrainerOptions(1)
    {
        ErrorFilterSize = 0,
        ReportProgress = new EachIteration(1),
        ReportMesages = true,
        ProgressWriter = ConsoleProgressWriter.Instance,
        LearningRateScaler = new ProportionalLearningRateScaler(new EachIteration(1), 9e-5f)
    };
    var trainer = new OptimizingTrainer<float>(xorNet, rmsProp, new XorDataset(true),
        options, new OptimizingSession("XOR"));

    var runner = ConsoleRunner.Create(trainer, xorNet);

    // Stop the runner once the network has effectively converged.
    trainer.TrainReport += (sender, args) =>
    {
        if (args.Errors.Last().RawError < 1e-7f)
        {
            runner.Stop();
            Console.WriteLine("Finished training.");
        }
    };

    var gui = new RetiaGui();
    gui.RunAsync(() => new TrainingWindow(new TypedTrainingModel<float>(trainer)));

    runner.Run();
}
/// <summary>
/// Loads a serialized <see cref="LayeredNet{T}"/> of floats from the given file.
/// </summary>
/// <param name="fileName">Path to the saved network file.</param>
/// <returns>The deserialized network.</returns>
private static LayeredNet<float> CreateNetwork(string fileName) => LayeredNet<float>.Load(fileName);