/// <summary>
/// Loads the normalized CSV data into memory, trains the network with plain
/// backpropagation until the error drops below 0.05 (or the epoch cap is hit),
/// prints per-pattern results with a correct-classification count, and saves
/// the trained network to <c>networkFile</c>.
/// </summary>
/// <param name="maxEpochs">Safety cap on training epochs so a run that never
/// reaches the 0.05 error target cannot loop forever. The default keeps
/// existing parameterless call sites working.</param>
public void Train(int maxEpochs = 100000)
{
    var network = createNetwork();
    // One ideal column, no headers, English CSV format — must mirror how normFile was produced.
    IMLDataSet trainingSet = EncogUtility.LoadCSV2Memory(normFile, network.InputCount, 1, false, CSVFormat.English, false);
    IMLTrain train = new Backpropagation(network, trainingSet);

    int epoch = 1;
    int truecases = 0;
    do
    {
        train.Iteration();
        Console.WriteLine(@"Epoch #" + epoch + @" Error:" + train.Error);
        epoch++;
    } while (train.Error > 0.05 && epoch <= maxEpochs); // cap prevents an infinite loop on divergence
    train.FinishTraining();

    Console.WriteLine(@"Neural Network Results:");
    foreach (IMLDataPair pair in trainingSet)
    {
        IMLData output = network.Compute(pair.Input);
        Console.WriteLine(@" actual=" + output[0] + @",ideal=" + pair.Ideal[0]);

        // Threshold both sides at 0.5 instead of comparing doubles with ==,
        // which is brittle for ideals that round-trip through normalization.
        bool predictedPositive = output[0] > 0.5;
        bool idealPositive = pair.Ideal[0] > 0.5;
        if (predictedPositive == idealPositive)
        {
            truecases++;
        }
    }
    Console.WriteLine(truecases);
    SerializeObject.Save(networkFile, network);
}
/// <summary>
/// Builds a three-layer feed-forward network (sigmoid hidden and output layers,
/// biased input/hidden layers), trains it with backpropagation until the error
/// falls below <c>_error</c> or the safety cap is reached, records per-epoch
/// errors and final weights, and dumps training-set predictions to the console.
/// </summary>
/// <returns><c>NetworkState.TRAINED</c> once the training loop stops.</returns>
public NetworkState TrainNetwork()
{
    int epoch = 0;
    _trainingSet = new BasicNeuralDataSet(_annInputs, _annOutputs);

    _basicNetwork = new BasicNetwork();
    _basicNetwork.AddLayer(new BasicLayer(null, true, _nInputNeurons));
    _basicNetwork.AddLayer(new BasicLayer(new ActivationSigmoid(), true, _nHiddenNeurons));
    _basicNetwork.AddLayer(new BasicLayer(new ActivationSigmoid(), false, _nOutputNeurons));
    _basicNetwork.Structure.FinalizeStructure();
    _basicNetwork.Reset();

    // Seeded randomizer in [-1, 1]: weights start from the same pseudo-random
    // state on every run, so training is reproducible despite being stochastic.
    new ConsistentRandomizer(-1, 1, 100).Randomize(_basicNetwork);

    Backpropagation train = new Backpropagation(_basicNetwork, _trainingSet, LearnRate, Momentum);
    train.FixFlatSpot = false;

    // Safety cap: without it, a target error that is never reached hangs the caller.
    const int maxEpochs = 1000000;
    do
    {
        train.Iteration();
        epoch++;
        _trainError = train.Error;
        BufferTrainError.Add(_trainError);
    } while (train.Error > _error && epoch < maxEpochs);
    train.FinishTraining();

    _neuronsWeight = _basicNetwork.Structure.Network.Flat.Weights.Select(x => System.Convert.ToDouble(x)).ToList();
    Make2DNeuronsWeightsMap();

    foreach (IMLDataPair pair in _trainingSet)
    {
        IMLData output = _basicNetwork.Compute(pair.Input);
        Console.WriteLine("Input: " + pair.Input[0] + @" - " + pair.Input[1] + @" - " + pair.Input[2]);
        Console.WriteLine("Output 0: - actual=" + output[0] + @"-ideal=" + pair.Ideal[0]);
        Console.WriteLine("Output 1: - actual=" + output[1] + @"-ideal=" + pair.Ideal[1]);
        Console.WriteLine("Output 2: - actual=" + output[2] + @"-ideal=" + pair.Ideal[2]);
    }
    return NetworkState.TRAINED;
}
/// <summary>
/// Trains the network for the configured number of iterations and collects a
/// per-epoch error history. Each history row is
/// [epoch, training error, validation error], with -1 as the validation error
/// when no validation set is supplied.
/// </summary>
/// <param name="trainingData">Raw rows used to fit the network.</param>
/// <param name="validationData">Raw rows used only for error reporting.</param>
/// <returns>One row per epoch: { epoch, trainingError, validationError }.</returns>
public double[][] Train(double[][] trainingData, double[][] validationData)
{
    PrepareNormalizerFor(trainingData, validationData);
    var trainingSet = PrepareSet(trainingData);
    var validationSet = PrepareSet(validationData);

    var trainer = new Backpropagation(_network, trainingSet, _settings.LearningRate, _settings.Momentum)
    {
        BatchSize = 1
    };

    var history = new List<double[]>();
    for (int epoch = 0; epoch < _settings.Iterations; epoch++)
    {
        trainer.Iteration();

        // Regression uses the network's raw error; classification uses the
        // project's own misclassification metric.
        bool regression = _settings.Type == ProblemType.Regression;
        double trainError = regression
            ? _network.CalculateError(trainingSet)
            : CalculateClassificationError(trainingSet);
        double validationError = -1;
        if (validationSet != null)
        {
            validationError = regression
                ? _network.CalculateError(validationSet)
                : CalculateClassificationError(validationSet);
        }

        var row = new[] { (double)epoch, trainError, validationError };
        history.Add(row);
        Console.WriteLine($"Epoch #{epoch} [{trainer.Error}] TrainingError: {row[1]} ValidationError: {row[2]}");
    }
    trainer.FinishTraining();
    return history.ToArray();
}
/// <summary>
/// Trains <paramref name="network"/> on <paramref name="trainingSet"/> with
/// backpropagation (learning rate 0.7, momentum 0.2) until the error drops
/// below 0.001 or the iteration cap is reached.
/// </summary>
/// <param name="maxIterations">Safety cap so a run that never reaches the
/// 0.001 error target cannot loop forever; the default keeps existing
/// two-argument call sites working.</param>
/// <returns>The same <paramref name="network"/> instance, now trained.</returns>
private static BasicNetwork TrainBasicNetwork(BasicNetwork network, BasicMLDataSet trainingSet, int maxIterations = 100000)
{
    var trainerAlgorithm = new Backpropagation(network, trainingSet, 0.7, 0.2);
    // Alternative trainers kept for experimentation:
    //var trainerAlgorithm = new ResilientPropagation(network, trainingSet);
    //var trainerAlgorithm = new ManhattanPropagation(network, trainingSet, 0.001)
    //var trainerAlgorithm = new ScaledConjugateGradient(network, trainingSet);
    //var trainerAlgorithm = new LevenbergMarquardtTraining(network, trainingSet);
    //var trainerAlgorithm = new QuickPropagation(network, trainingSet, 2.0);

    // Start at 0 and increment after Iteration(): the first log line now reads
    // "Iteration Num : 1" (the original started at 1 and logged 2 first).
    var iteration = 0;
    do
    {
        trainerAlgorithm.Iteration();
        iteration++;
        Console.WriteLine($"Iteration Num : {iteration}, Error : {trainerAlgorithm.Error}");
    } while (trainerAlgorithm.Error > 0.001 && iteration < maxIterations);
    trainerAlgorithm.FinishTraining();
    return network;
}
/// <summary>
/// Trains the regression network with backpropagation, logging the error of
/// every iteration and exporting the error history, until the error falls
/// below 0.001 or 5000 iterations have run.
/// </summary>
/// <returns>The data set that was built from <paramref name="doNauki"/>.</returns>
static IMLDataSet UczSiec(BasicNetwork siec, DaneRegresja doNauki, double wspolczynnikNauki = 0.003, double bezwladnosc = 0.01)
{
    IMLDataSet dataSet = new BasicMLDataSet(doNauki.wejscioweX.ToArray(), doNauki.oczekiwaneY.ToArray());
    IMLTrain train = new Backpropagation(siec, dataSet, wspolczynnikNauki, bezwladnosc);

    const int maxIter = 5000;
    var bledyTreningu = new List<double>();
    // Loop body always runs at least once, matching the original do/while.
    for (int iter = 1; ; )
    {
        train.Iteration();
        Console.WriteLine("Iteracja #{0} Blad {1:0.0000}", iter, train.Error);
        bledyTreningu.Add(train.Error);
        iter++;
        if (train.Error < 0.001 || iter >= maxIter)
        {
            break;
        }
    }
    EksportujBledyTreningu(sciezkaRegresjaBledyTreningu, bledyTreningu);
    train.FinishTraining();
    return dataSet;
}
/// <summary>
/// Trains the classification network with backpropagation, logging the error
/// of every iteration and exporting the error history, until the error falls
/// below 0.03 or 5000 iterations have run. The default coefficients were
/// picked so that training works at all, not tuned for quality.
/// </summary>
/// <returns>The data set that was built from <paramref name="doNauki"/>.</returns>
static IMLDataSet UczSiec(BasicNetwork siec, DaneKlasyfikacja doNauki, double wspolczynnikNauki = 0.003, double bezwladnosc = 0.01)
{
    IMLDataSet dataSet = new BasicMLDataSet(doNauki.punkty.ToArray(), doNauki.klasyWej.ToArray());
    IMLTrain train = new Backpropagation(siec, dataSet, wspolczynnikNauki, bezwladnosc);

    const int maxIter = 5000;
    var bledyTreningu = new List<double>();
    // Loop body always runs at least once, matching the original do/while.
    for (int iter = 1; ; )
    {
        train.Iteration();
        Console.WriteLine("Iteracja #{0} Blad {1:0.0000}", iter, train.Error);
        bledyTreningu.Add(train.Error);
        iter++;
        if (train.Error < 0.03 || iter >= maxIter)
        {
            break;
        }
    }
    EksportujBledyTreningu(sciezkaKlasyfikacjaBledyTreningu, bledyTreningu);
    train.FinishTraining();
    return dataSet;
}
/// <summary>
/// Trains (or resumes training of) a two-class image network: loads a persisted
/// network if one exists (otherwise builds a fresh one), samples vegetation and
/// "nothing" tiles into a downsampled training set, runs a fixed 49-epoch
/// backpropagation budget, persists the result, and prints predictions for the
/// test images.
/// </summary>
public static void Run()
{
    const string networkPath = @"D:\Imagery\network\network.eg";
    FileInfo networkFile = new FileInfo(networkPath);

    BasicNetwork network;
    if (networkFile.Exists)
    {
        // Resume from the persisted network; building a fresh one first (as the
        // original code did) would just be discarded work.
        network = (BasicNetwork)Encog.Persist.EncogDirectoryPersistence.LoadObject(networkFile);
    }
    else
    {
        network = new BasicNetwork();
        network.AddLayer(new BasicLayer(null, true, SIZE * SIZE * 3));
        network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, NERUONCOUNT));
        network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 2));
        network.Structure.FinalizeStructure();
        network.Reset();
    }

    var trainingSet = new Encog.ML.Data.Image.ImageMLDataSet(new RGBDownsample(), false, 1, -1);
    Random rnd = new Random();

    // Sample up to 1000 vegetation and 5000 "nothing" tiles, then shuffle the union.
    List<string> fileEntries = Directory.GetFiles(@"D:\Imagery\_Vege").OrderBy(x => rnd.Next()).Take(1000).ToList();
    fileEntries.AddRange(Directory.GetFiles(@"D:\Imagery\_Nothing").OrderBy(x => rnd.Next()).Take(5000).ToArray());
    fileEntries = fileEntries.OrderBy(x => rnd.Next()).Take(6000).ToList();

    foreach (var file in fileEntries)
    {
        // NOTE(review): bitmaps are intentionally not disposed here — the data set
        // appears to need the images until Downsample() below; confirm against
        // ImageMLDataSet before wrapping these in using-blocks.
        var bitmap = new System.Drawing.Bitmap(file);
        ImageMLData data = new ImageMLData(bitmap);
        BasicMLData ideal = new BasicMLData(file.Contains("_Nothing") ? Nothing : Vegetation);
        trainingSet.Add(data, ideal);
    }
    trainingSet.Downsample(SIZE, SIZE);

    IMLTrain train = new Backpropagation(network, trainingSet, .001, 0.02);
    int epoch = 1;
    do
    {
        train.Iteration();
        Console.WriteLine(@"Epoch #" + epoch + @" Error: " + train.Error);
        epoch++;
    } while (epoch < 50); // fixed epoch budget, no error-based stopping criterion
    train.FinishTraining();
    Encog.Persist.EncogDirectoryPersistence.SaveObject(networkFile, network);

    // Evaluate on the held-out test folders.
    var testingSet = new Encog.ML.Data.Image.ImageMLDataSet(new RGBDownsample(), false, 1, -1);
    foreach (var file in Directory.GetFiles(@"D:\Imagery\_VegeTest"))
    {
        testingSet.Add(new ImageMLData(new System.Drawing.Bitmap(file)), new BasicMLData(Vegetation));
    }
    foreach (var file in Directory.GetFiles(@"D:\Imagery\_NothingTest"))
    {
        testingSet.Add(new ImageMLData(new System.Drawing.Bitmap(file)), new BasicMLData(Nothing));
    }
    testingSet.Downsample(SIZE, SIZE);

    Console.WriteLine(@"Neural Network Results:");
    foreach (IMLDataPair pair in testingSet)
    {
        IMLData output = network.Compute(pair.Input);
        Console.WriteLine(@", actual (" + output[0] + @"," + output[1] + @"),ideal (" + pair.Ideal[0] + @"," + pair.Ideal[1] + ")");
    }
    EncogFramework.Instance.Shutdown();
}