Example 1
        public void TrainOnMemory(IConcurrentMemory<TData> memory)
        {
            var trainingData = _dataBuilder.BuildDataset(memory);

            if (trainingData == null || trainingData.Count == 0)
            {
                return;
            }

            // Clone the current network and train the copy, so the live instance stays untouched until the new weights are ready
            var clonedInstance = _network.Clone();
            var result = NetworkManager.TrainNetwork(clonedInstance,
                                                     trainingData,
                                                     TrainingAlgorithms.AdaDelta(),
                                                     _configuration.Epochs, 0.5f,
                                                     TrackBatchProgress,
                                                     TrainingProgress);

            Console.WriteLine("\nTraining session completed, moving to next one");

            var backupName = $"backup-network-{DateTime.Now:yyyyMMdd-HH-mm-ss-fff}.modl";

            // Save a backup of the previous weights before swapping in the newly trained network
            using (var backupStream = File.Create(backupName))
            {
                _network.Save(backupStream);
            }
            Console.WriteLine($"Backup model {backupName} saved");
            _network = clonedInstance;
        }
Example 2
        public static async Task Main()
        {
            // Create the network
            INeuralNetwork network = NetworkManager.NewSequential(TensorInfo.Image<Alpha8>(28, 28),
                                                                  NetworkLayers.Convolutional((5, 5), 20, ActivationType.Identity),
                                                                  NetworkLayers.Pooling(ActivationType.LeakyReLU),
                                                                  NetworkLayers.FullyConnected(100, ActivationType.LeCunTanh),
                                                                  NetworkLayers.Softmax(10));

            // Prepare the dataset
            ITrainingDataset trainingData = await Mnist.GetTrainingDatasetAsync(100); // Batches of 100 samples

            ITestDataset testData = await Mnist.GetTestDatasetAsync(p => Printf($"Epoch {p.Iteration}, cost: {p.Result.Cost}, accuracy: {p.Result.Accuracy}"));

            if (trainingData == null || testData == null)
            {
                Printf("Error downloading the datasets");
                Console.ReadKey();
                return;
            }

            // Train the network for 60 epochs, using AdaDelta and a 0.5 dropout probability
            TrainingSessionResult result = await NetworkManager.TrainNetworkAsync(network,
                                                                                  trainingData,
                                                                                  TrainingAlgorithms.AdaDelta(),
                                                                                  60, 0.5f,
                                                                                  TrackBatchProgress,
                                                                                  testDataset: testData);

            Printf($"Stop reason: {result.StopReason}, elapsed time: {result.TrainingTime}");
            Console.ReadKey();
        }
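
Examples 1-3 pass a TrackBatchProgress callback, and Examples 2 and 3 also call a Printf helper, but neither is defined in the snippets. Below is a minimal sketch of what such helpers might look like; the BatchProgress parameter type and its Percentage property are assumptions based on the library's naming, not something these examples show.

        // Hypothetical helpers referenced by the examples above but not shown there.
        // The BatchProgress type and its Percentage property are assumed; adapt the
        // signature to the delegate that NetworkManager.TrainNetworkAsync expects.
        private static void Printf(string text) => Console.WriteLine($"[{DateTime.Now:HH:mm:ss}] {text}");

        private static void TrackBatchProgress(BatchProgress progress)
        {
            // Rewrite the current console line with the share of samples processed so far
            Console.Write($"\rBatch progress: {progress.Percentage:N1}%");
        }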
Example 3
        public static async Task Main()
        {
            // Create the network
            INeuralNetwork network = NetworkManager.NewSequential(TensorInfo.Image<Alpha8>(28, 28),
                                                                  CuDnnNetworkLayers.Convolutional((5, 5), 20, ActivationType.Identity),
                                                                  CuDnnNetworkLayers.Pooling(ActivationType.LeakyReLU),
                                                                  CuDnnNetworkLayers.Convolutional((3, 3), 40, ActivationType.Identity),
                                                                  CuDnnNetworkLayers.Pooling(ActivationType.LeakyReLU),
                                                                  CuDnnNetworkLayers.FullyConnected(125, ActivationType.LeCunTanh),
                                                                  CuDnnNetworkLayers.Softmax(10));

            // Prepare the dataset
            ITrainingDataset trainingData = await Mnist.GetTrainingDatasetAsync(400); // Batches of 400 samples

            ITestDataset testData = await Mnist.GetTestDatasetAsync(p => Printf($"Epoch {p.Iteration}, cost: {p.Result.Cost}, accuracy: {p.Result.Accuracy}"));

            if (trainingData == null || testData == null)
            {
                Printf("Error downloading the datasets");
                Console.ReadKey();
                return;
            }

            // Set up Ctrl+C cancellation and train the network
            CancellationTokenSource cts = new CancellationTokenSource();

            Console.CancelKeyPress += (s, e) =>
            {
                e.Cancel = true; // Keep the process alive so the token can stop training gracefully
                cts.Cancel();
            };
            TrainingSessionResult result = await NetworkManager.TrainNetworkAsync(network,
                                                                                  trainingData,
                                                                                  TrainingAlgorithms.AdaDelta(),
                                                                                  20, 0.5f,
                                                                                  TrackBatchProgress,
                                                                                  testDataset: testData, token: cts.Token);

            // Save the training reports
            string
                timestamp = DateTime.Now.ToString("yy-MM-dd-hh-mm-ss"),
                path      = Path.GetDirectoryName(Path.GetFullPath(Assembly.GetExecutingAssembly().Location)),
                dir       = Path.Combine(path ?? throw new InvalidOperationException("The dll path can't be null"), "TrainingResults", timestamp);

            Directory.CreateDirectory(dir);
            File.WriteAllText(Path.Combine(dir, $"{timestamp}_cost.py"), result.TestReports.AsPythonMatplotlibChart(TrainingReportType.Cost));
            File.WriteAllText(Path.Combine(dir, $"{timestamp}_accuracy.py"), result.TestReports.AsPythonMatplotlibChart(TrainingReportType.Accuracy));
            network.Save(new FileInfo(Path.Combine(dir, $"{timestamp}{NetworkLoader.NetworkFileExtension}")));
            File.WriteAllText(Path.Combine(dir, $"{timestamp}.json"), network.SerializeMetadataAsJson());
            File.WriteAllText(Path.Combine(dir, $"{timestamp}_report.json"), result.SerializeAsJson());
            Printf($"Stop reason: {result.StopReason}, elapsed time: {result.TrainingTime}");
            Console.ReadKey();
        }
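
Example 3 saves the trained network with the file extension exposed by NetworkLoader, but none of the examples show how a saved model is loaded back. The sketch below shows the reverse step under the assumption that NetworkLoader offers a TryLoad overload taking a FileInfo and an execution-mode preference; that signature and the ExecutionModePreference values are assumptions, and dir and timestamp refer to the variables from Example 3.

        // Hypothetical restore step for a network saved as in Example 3.
        // NetworkLoader.TryLoad and ExecutionModePreference are assumed here; verify the
        // exact overload against the library before relying on this.
        var savedFile = new FileInfo(Path.Combine(dir, $"{timestamp}{NetworkLoader.NetworkFileExtension}"));
        INeuralNetwork restored = NetworkLoader.TryLoad(savedFile, ExecutionModePreference.Cpu);
        if (restored == null)
        {
            Printf("Could not load the saved network");
        }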
Example 4
        public TrainingSessionResult Train(ITrainingDataset data, ITestDataset testData)
        {
            INeuralNetwork net = NetworkManager.NewSequential(TensorInfo.Linear(SubHistory.SubHistoryLength),
                                                              NetworkLayers.FullyConnected(SubHistory.SubHistoryLength, ActivationType.LeCunTanh),
                                                              NetworkLayers.Softmax(IMoveEngine.Payoffs));
            TrainingSessionResult result = NetworkManager.TrainNetwork(net,
                                                                       data,
                                                                       TrainingAlgorithms.AdaDelta(),
                                                                       100, 0.0f,
                                                                       null,
                                                                       testDataset: testData);

            if (result.StopReason == TrainingStopReason.EpochsCompleted)
            {
                _storage.Save(net);
                _network = net;
            }
            return result;
        }
Example 5
        // Tries to process a random batch through the network (this method should just not crash)
        private static void ValidateGraph([NotNull] INeuralNetwork network)
        {
            float[,]
                x = new float[200, network.InputInfo.Size],
                y = new float[200, network.OutputInfo.Size];
            for (int i = 0; i < 200; i++)
            {
                for (int j = 0; j < x.GetLength(1); j++)
                {
                    x[i, j] = ThreadSafeRandom.NextFloat();
                }
                y[i, ThreadSafeRandom.NextInt(max: y.GetLength(1))] = 1;
            }
            _ = network.Forward(x);
            SamplesBatch            batch = new SamplesBatch(x, y);
            ComputationGraphNetwork graph = network.To<INeuralNetwork, ComputationGraphNetwork>();

            graph.Backpropagate(batch, 0.5f, WeightsUpdaters.AdaDelta(TrainingAlgorithms.AdaDelta(), graph));
            _ = network.ExtractDeepFeatures(x);
        }
Example 6
 public override ITrainingAlgorithmInfo GetTrainingAlgorithm()
 {
     return TrainingAlgorithms.Momentum(this.LearningRate, this.Regularization, this.Coefficient);
 }
Example 7
 public ITrainingAlgorithmInfo GetTrainingAlgorithm()
 {
     return TrainingAlgorithms.AdaDelta(this.Rho, this.Epsilon, this.L2);
 }
Example 8
 public ITrainingAlgorithmInfo GetTrainingAlgorithm()
 {
     return TrainingAlgorithms.Adam(this.Eta, this.Beta1, this.Beta2, this.Epsilon);
 }
Example 9
 public ITrainingAlgorithmInfo GetTrainingAlgorithm()
 {
     return TrainingAlgorithms.RMSProp(this.Eta, this.Rho, this.Lambda, this.Epsilon);
 }
Example 10
 public ITrainingAlgorithmInfo GetTrainingAlgorithm()
 {
     return TrainingAlgorithms.AdaGrad(this.Eta, this.Lambda, this.Epsilon);
 }
Example 11
 public virtual ITrainingAlgorithmInfo GetTrainingAlgorithm()
 {
     return TrainingAlgorithms.StochasticGradientDescent(this.LearningRate, this.Regularization);
 }
Example 12
 public ITrainingAlgorithmInfo GetTrainingAlgorithm()
 {
     return TrainingAlgorithms.AdaMax(this.Eta, this.Beta1, this.Beta2);
 }
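
Examples 6-12 each return an ITrainingAlgorithmInfo describing a different optimizer (Momentum, AdaDelta, Adam, RMSProp, AdaGrad, plain stochastic gradient descent, AdaMax). Any of them can be handed to the training call exactly where Examples 1-4 pass TrainingAlgorithms.AdaDelta() inline. A minimal sketch follows; settings is a hypothetical object exposing one of the GetTrainingAlgorithm() overrides above, while net, data and testData mirror the variables used in Example 4.

 // Minimal sketch: plug an optimizer produced by a GetTrainingAlgorithm() override
 // into the same TrainNetwork call shown in Example 4.
 ITrainingAlgorithmInfo algorithm = settings.GetTrainingAlgorithm();
 TrainingSessionResult result = NetworkManager.TrainNetwork(net,
                                                            data,
                                                            algorithm,
                                                            100, 0.0f,
                                                            null,
                                                            testDataset: testData);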