/// <summary>
/// Trains the network on the given data set using resilient propagation,
/// printing a progress dot roughly every 10% of the configured iteration count.
/// </summary>
/// <param name="training">The training data set.</param>
public void Train(IMLDataSet training)
{
    ITrain train = new ResilientPropagation(network, training);
    //SVDTraining train = new SVDTraining(network, training);

    // The original computed (epoch % (iterations / 10)), which throws
    // DivideByZeroException whenever iterations < 10. Clamp the progress
    // interval to at least 1 so small iteration counts still work.
    int progressInterval = Math.Max(1, iterations / 10);

    int epoch = 1;
    do
    {
        train.Iteration();
        if (epoch % progressInterval == 0)
        {
            Console.Write(".");
        }
        epoch++;
    } while (epoch < iterations * 100);
}
/// <summary>
/// Trains the network using the configured training method, iterating until the
/// error falls to mMaxError or mMaxEpoch epochs have run.
/// </summary>
/// <param name="network">The network to train.</param>
/// <param name="trainingSet">The training data set.</param>
private void Train(BasicNetwork network, IMLDataSet trainingSet)
{
    // The original repeated an identical iteration loop in four branches;
    // select the trainer once and share the loop instead.
    IMLTrain train;
    if (mTrainingMethod == TrainingMethod.ResilientPropagation)
    {
        train = new ResilientPropagation(network, trainingSet);
    }
    else if (mTrainingMethod == TrainingMethod.LevenbergMarquardt)
    {
        train = new LevenbergMarquardtTraining(network, trainingSet);
    }
    else if (mTrainingMethod == TrainingMethod.Backpropagation)
    {
        train = new Backpropagation(network, trainingSet);
    }
    else if (mTrainingMethod == TrainingMethod.ManhattanPropagation)
    {
        // 0.9 is the learning rate used by the original implementation.
        train = new ManhattanPropagation(network, trainingSet, 0.9);
    }
    else
    {
        // Unknown method: the original code silently did nothing.
        return;
    }

    int epoch = 1;
    do
    {
        train.Iteration();
        epoch++;
    } while (train.Error > mMaxError && epoch < mMaxEpoch);
}
/// <summary>
/// Counts how many resilient-propagation training iterations complete within a
/// fixed time budget of 10 * Milis milliseconds. Each iteration runs every
/// training pair through the network.
/// </summary>
/// <param name="network">The network to train.</param>
/// <param name="training">The training data to use.</param>
/// <returns>The number of iterations performed within the time budget.</returns>
public static int EvaluateTrain(BasicNetwork network, IMLDataSet training)
{
    IMLTrain trainer = new ResilientPropagation(network, training);
    int count = 0;
    var timer = Stopwatch.StartNew();
    while (timer.ElapsedMilliseconds < 10 * Milis)
    {
        count++;
        trainer.Iteration();
    }
    return count;
}
/// <summary>
/// Loads the persisted network and normalized training data, trains with
/// resilient propagation until the error reaches 0.01, and saves the network.
/// </summary>
private static void TrainNetwork()
{
    var network = (BasicNetwork)EncogDirectoryPersistence.LoadObject(Config.TrainedNetworkFile);
    var trainingSet = EncogUtility.LoadCSV2Memory(
        Config.NormalizedTrainingFile.ToString(),
        network.InputCount,
        network.OutputCount,
        true,
        CSVFormat.English,
        false);

    var train = new ResilientPropagation(network, trainingSet);
    var epoch = 0;
    while (true)
    {
        train.Iteration();
        epoch++;
        Console.WriteLine("Epoch : {0} Error : {1}", epoch, train.Error);
        if (train.Error <= 0.01)
        {
            break;
        }
    }

    EncogDirectoryPersistence.SaveObject(Config.TrainedNetworkFile, network);
}
// Builds an image training set from the captured digit images, trains a
// feed-forward network via a training dialog, and saves it to "network.eg".
// NOTE(review): the MemoryStream and Drawing.Bitmap are never disposed. GDI+
// requires the source stream to stay alive for the Bitmap's lifetime, so
// disposing early is unsafe — confirm lifetime before adding using blocks.
private void Learn_Click(object sender, RoutedEventArgs e)
{
    var downsample = new Downsampler();
    // ImageMLDataSet(downsampler, findBounds: true, hi: 1, lo: -1) — assumes
    // Encog's standard image data set constructor; verify parameter meaning.
    var training = new ImageMLDataSet(downsample, true, 1, -1);

    // One-hot (1 / -1) ideal vector per digit class i.
    for (var i = 0; i < Images.Count; ++i)
    {
        var ideal = new BasicMLData(DIGITS_COUNT);
        for (int j = 0; j < DIGITS_COUNT; ++j)
        {
            if (j == i)
            {
                ideal[j] = 1;
            }
            else
            {
                ideal[j] = -1;
            }
        }

        // Re-encode each WPF BitmapSource as a BMP in memory so it can be
        // wrapped in a System.Drawing.Bitmap for Encog's ImageMLData.
        foreach (var img in Images[i])
        {
            MemoryStream stream = new MemoryStream();
            BitmapEncoder encoder = new BmpBitmapEncoder();
            encoder.Frames.Add(BitmapFrame.Create(img));
            encoder.Save(stream);
            var bitmap = new Drawing.Bitmap(stream);
            var data = new ImageMLData(bitmap);
            training.Add(data, ideal);
        }
    }

    // Downsample every image to the fixed digit grid before training.
    training.Downsample(DIGIT_HEIGHT, DIGIT_WIDTH);

    // Single hidden layer of 35 neurons; 'true' presumably selects tanh
    // activation per EncogUtility.SimpleFeedForward — TODO confirm.
    network = EncogUtility.SimpleFeedForward(training.InputSize, 35, 0, training.IdealSize, true);
    double strategyError = 0.01;   // only used by the commented-out ResetStrategy
    int strategyCycles = 2000;     // only used by the commented-out ResetStrategy
    var train = new ResilientPropagation(network, training);
    //train.AddStrategy(new ResetStrategy(strategyError, strategyCycles));

    // Modal training dialog drives the iterations interactively.
    EncogUtility.TrainDialog(train, network, training);
    EncogDirectoryPersistence.SaveObject(new FileInfo("network.eg"), network);
}
/// <summary>
/// Trains the bot's network on each stored request/response pair from the
/// learning repository, one pair at a time.
/// </summary>
public void LearnBot()
{
    // The original loop ran until the error dropped below 1e-10 with no
    // iteration cap; if a pair never converges that far the loop spins
    // forever. Cap epochs per pair, matching the bounded loops used by the
    // other training routines in this file.
    const int maxEpochs = 100000;

    var rows = learnDbRepository.GetStorageRow();
    foreach (var row in rows)
    {
        var inputs = createDoubles(row.Request);
        var outputs = createDoubles(row.Responce);
        IMLDataSet trainingSet = new BasicMLDataSet(
            new double[][] { inputs },
            new double[][] { outputs });
        IMLTrain train = new ResilientPropagation(NeuralNetwork, trainingSet);

        int epoch = 1;
        do
        {
            train.Iteration();
            epoch++;
        } while (train.Error > 0.0000000001 && epoch < maxEpochs);
    }
}
/// <summary>
/// Loads a persisted network and its training CSV, trains with resilient
/// propagation until the error reaches 0.01, and saves the network back.
/// </summary>
/// <param name="networkFile">File containing the persisted BasicNetwork.</param>
/// <param name="trainingDataFile">CSV file with the training data.</param>
/// <exception cref="InvalidOperationException">
/// Thrown when the file does not contain a BasicNetwork.
/// </exception>
public void Train(FileInfo networkFile, FileInfo trainingDataFile)
{
    var network = EncogDirectoryPersistence.LoadObject(networkFile) as BasicNetwork;

    // The original used 'as' with no null check, deferring the failure to a
    // NullReferenceException on network.InputCount. Fail fast instead.
    if (network == null)
    {
        throw new InvalidOperationException(
            $"File '{networkFile}' does not contain a BasicNetwork.");
    }

    var trainingSet = EncogUtility.LoadCSV2Memory(
        trainingDataFile.ToString(),
        network.InputCount,
        network.OutputCount,
        true,
        CSVFormat.English,
        false);

    var trainer = new ResilientPropagation(network, trainingSet);
    int iter = 1;
    do
    {
        trainer.Iteration();
        Console.WriteLine($"\tIteration: {iter++} | Error: {trainer.Error}");
    } while (trainer.Error > 0.01);

    EncogDirectoryPersistence.SaveObject(networkFile, network);
}
/// <summary>
/// Train the network, to a specific error, send the output to the console.
/// </summary>
/// <param name="method">The model to train.</param>
/// <param name="trainingSet">The training set to use.</param>
/// <param name="error">The error level to train to.</param>
public static void TrainToError(IMLMethod method, IMLDataSet trainingSet, double error)
{
    IMLTrain train;

    // BUG FIX: the first branch was a stand-alone 'if', so a SupportVectorMachine
    // fell through to the final 'else' and 'train' was overwritten with a
    // ResilientPropagation over an invalid (IContainsFlat) cast. The three
    // branches must be mutually exclusive.
    if (method is SupportVectorMachine)
    {
        train = new SVMTrain((SupportVectorMachine)method, trainingSet);
    }
    else if (method is FreeformNetwork)
    {
        train = new FreeformResilientPropagation((FreeformNetwork)method, trainingSet);
    }
    else
    {
        train = new ResilientPropagation((IContainsFlat)method, trainingSet);
    }
    TrainToError(train, error);
}
/// <summary>
/// Builds a 2-3-1 feed-forward network, trains it on a CSV data set with
/// resilient propagation down to an error of 0.01, and prints the results.
/// </summary>
static void Main(string[] args)
{
    var network = new BasicNetwork();
    network.AddLayer(new BasicLayer(2));
    network.AddLayer(new BasicLayer(3));
    network.AddLayer(new BasicLayer(1));
    network.Structure.FinalizeStructure();
    network.Reset();

    var trainingDataSource = new CSVDataSource(@"Data\training.csv", true, ',');
    //var validationDataSource = new CSVDataSource(@"Data\validation.csv", true, ',');
    var trainingSet = new VersatileMLDataSet(trainingDataSource);
    //var validationSet = new VersatileMLDataSet(validationDataSource);
    trainingSet.Analyze();
    trainingSet.Normalize();

    var training = new ResilientPropagation(network, trainingSet);
    var epoch = 0;
    while (true)
    {
        training.Iteration();
        epoch++;
        Console.WriteLine($"Epoch #{epoch}. Error: {training.Error}");
        if (training.Error <= 0.01)
        {
            break;
        }
    }
    training.FinishTraining();

    Console.WriteLine("Neural Network Results:");
    foreach (var pair in trainingSet)
    {
        var output = network.Compute(pair.Input);
        Console.WriteLine($"{pair.Input[0]},{pair.Input[1]}, actual={output[0]}, ideal={pair.Ideal}");
    }

    EncogFramework.Instance.Shutdown();
}
/// <summary>
/// XOR demo: trains a network on the four XOR cases with resilient
/// propagation until the error drops below 1e-5, then prints each result.
/// </summary>
private static void Main(string[] args)
{
    double[][] XOR_Input =
    {
        new[] { 0.0, 0.0 },
        new[] { 1.0, 0.0 },
        new[] { 0.0, 1.0 },
        new[] { 1.0, 1.0 }
    };
    double[][] XOR_Ideal =
    {
        new[] { 0.0 },
        new[] { 1.0 },
        new[] { 1.0 },
        new[] { 0.0 }
    };

    var traningSet = new BasicMLDataSet(XOR_Input, XOR_Ideal);
    BasicNetwork network = CreateNetwork();
    var train = new ResilientPropagation(network, traningSet);

    int epoch = 1;
    while (true)
    {
        train.Iteration();
        epoch++;
        Console.WriteLine("Iteration no :{0}, Error: {1}", epoch, train.Error);
        if (train.Error <= 0.00001)
        {
            break;
        }
    }

    foreach (var item in traningSet)
    {
        var output = network.Compute(item.Input);
        Console.WriteLine(
            "Input: {0},{1}, Ideal: {2}, Actual {3}",
            item.Input[0], item.Input[1], item.Ideal[0], output[0]);
    }

    Console.WriteLine("Press any key to exit..");
    Console.ReadLine();
}
/// <summary>
/// Generates 600 input/target samples from a fresh pond simulation and trains
/// the given network on them with resilient propagation down to 0.01 error.
/// </summary>
/// <param name="net">The network to train.</param>
/// <returns>The same network instance, trained.</returns>
public static BasicNetwork setup(BasicNetwork net)
{
    Pond.Pond pond = new Pond.Pond(null, null);
    List<double[]> inputData = new List<double[]>();
    List<double[]> outputData = new List<double[]>();

    // Sample 600 (input, line-target) pairs from the simulation.
    for (int i = 0; i < 600; i++)
    {
        BasicMLData data = pond.getInputData();
        double[] sample = new double[8];
        for (int j = 0; j < 8; j++)
        {
            sample[j] = data[j];
        }
        inputData.Add(sample);

        Vector2 target = pond.getLineTarget();
        outputData.Add(new double[2] { (double)target.X, (double)target.Y });
    }

    BasicMLDataSet trainingSet = new BasicMLDataSet(inputData.ToArray(), outputData.ToArray());
    ResilientPropagation train = new ResilientPropagation(net, trainingSet);

    int epoch = 0;
    while (true)
    {
        train.Iteration();
        Console.WriteLine("Epoch #" + epoch + " Error:" + train.Error);
        epoch++;
        if (train.Error <= 0.01)
        {
            break;
        }
    }

    return net;
}
/// <see cref="INetwork.TrainNetwork"/>
/// <summary>
/// Loads the persisted network and normalized training data, trains with
/// resilient propagation until the error reaches Rate or MaxEpochs epochs
/// have run, records the final error, and saves the network.
/// </summary>
public INetwork TrainNetwork()
{
    var network = (BasicNetwork)EncogDirectoryPersistence.LoadObject(TrainedNetworkFile);
    var trainingSet = EncogUtility.LoadCSV2Memory(
        NormalizedTrainingFile.ToString(),
        network.InputCount,
        network.OutputCount,
        true,
        CSVFormat.English,
        false);

    var train = new ResilientPropagation(network, trainingSet);
    int epoch = 1;
    do
    {
        train.Iteration();
        epoch++;
        // BUG FIX: the original condition used '||', which forced at least
        // MaxEpochs iterations and never stopped at MaxEpochs while the error
        // was still high. '&&' trains while the error is above Rate AND the
        // epoch budget remains — the standard pattern.
    } while (train.Error > Rate && epoch < MaxEpochs);

    Error = train.Error;
    EncogDirectoryPersistence.SaveObject(TrainedNetworkFile, network);
    return this;
}
/// <summary>
/// Trains the network with resilient propagation until the error drops below
/// TARGET_ERROR, counting iterations.
/// </summary>
/// <param name="network">The network to train.</param>
/// <param name="training">The training data to use.</param>
/// <returns>
/// The number of iterations needed to converge, or -1 if the error did not
/// reach TARGET_ERROR within 1000 iterations.
/// </returns>
public static int Evaluate(BasicNetwork network, IMLDataSet training)
{
    ResilientPropagation rprop = new ResilientPropagation(network, training);
    int iterations = 0;
    for (; ;)
    {
        rprop.Iteration();
        iterations++;
        if (rprop.Error < TARGET_ERROR)
        {
            return iterations;
        }
        if (iterations > 1000)
        {
            // Did not converge. (The original assigned iterations = 0 here,
            // which was dead code immediately before the return.)
            return -1;
        }
    }
}
// Trains the network on all remembered (input, sense) pairs. Training stops
// when the error is low enough, the epoch budget runs out, or the error has
// effectively stopped improving between epochs.
void Train()
{
    if (Memory.Count == 0)
    {
        return;
    }

    network.Reset();

    // Prepare the training data arrays from the memory lists.
    double[][] inputRows = new double[Memory.Count][];
    double[][] senseRows = new double[Memory.Count][];
    for (int i = 0; i < Memory.Count; i++)
    {
        inputRows[i] = Memory[i];
        senseRows[i] = MemorySense[i];
    }

    IMLDataSet trainingSet = new BasicMLDataSet(inputRows, senseRows);
    IMLTrain train = new ResilientPropagation(network, trainingSet);

    int epoch = 1;
    double previousError = 9999;   // sentinel so the first delta is large
    double delta = 999;
    do
    {
        train.Iteration();
        //Console.SetCursorPosition(0, 0);   // progress output (disabled)
        //Console.Write(@"Epoch #" + epoch + @" Error:" + train.Error);
        epoch++;
        delta = Math.Abs(previousError - train.Error);
        previousError = train.Error;
        // Stop on convergence, epoch budget, or a vanishing improvement.
    } while (train.Error > 0.0001 && epoch < 3000 && delta > 0.00001);

    train.FinishTraining();

    //double sumd=0.0;   // post-training total-error check (disabled)
    //foreach (IMLDataPair pair in trainingSet)
    //{
    //    IMLData output = network.Compute(pair.Input);
    //    sumd = sumd + Math.Abs(pair.Ideal[0] - output[0]);
    //    sumd = sumd / trainingSet.InputSize;
    //}
}
/// <summary>
/// XOR demo: trains a network on the four XOR cases with resilient
/// propagation until the error drops below 0.001, then prints each result.
/// </summary>
private static void XORTest()
{
    double[][] XOR_Input =
    {
        new[] { 0.0, 0.0 },
        new[] { 1.0, 0.0 },
        new[] { 0.0, 1.0 },
        new[] { 1.0, 1.0 }
    };
    double[][] XOR_Ideal =
    {
        new[] { 0.0 },
        new[] { 1.0 },
        new[] { 1.0 },
        new[] { 0.0 }
    };

    var trainingSet = new BasicMLDataSet(XOR_Input, XOR_Ideal);
    var network = CreateNetwork();
    var train = new ResilientPropagation(network, trainingSet);

    int epoch = 1;
    while (true)
    {
        train.Iteration();
        epoch++;
        Console.WriteLine($"Iteration No: {epoch}, Error: {train.Error}");
        if (train.Error <= 0.001)
        {
            break;
        }
    }

    foreach (var item in trainingSet)
    {
        var output = network.Compute(item.Input);
        Console.WriteLine($"Input : {item.Input[0]}, {item.Input[1]}, Ideal: {item.Ideal[0]}, Actual : {output[0]}");
    }
}
/// <summary>
/// Classic Encog XOR example: build a 2-3-1 sigmoid network by hand, train it
/// with resilient propagation to an error of 0.01, then print the results.
/// </summary>
private static void Main(string[] args)
{
    // Create a neural network, without using a factory.
    var network = new BasicNetwork();
    network.AddLayer(new BasicLayer(null, true, 2));
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
    network.Structure.FinalizeStructure();
    network.Reset();

    // Create training data.
    IMLDataSet trainingSet = new BasicMLDataSet(XORInput, XORIdeal);

    // Train the neural network.
    IMLTrain train = new ResilientPropagation(network, trainingSet);
    int epoch = 1;
    while (true)
    {
        train.Iteration();
        Console.WriteLine(@"Epoch #" + epoch + @" Error:" + train.Error);
        epoch++;
        if (train.Error <= 0.01)
        {
            break;
        }
    }
    train.FinishTraining();

    // Test the neural network.
    Console.WriteLine(@"Neural Network Results:");
    foreach (IMLDataPair pair in trainingSet)
    {
        IMLData output = network.Compute(pair.Input);
        Console.WriteLine(pair.Input[0] + @"," + pair.Input[1]
            + @", actual=" + output[0] + @",ideal=" + pair.Ideal[0]);
    }

    EncogFramework.Instance.Shutdown();
}
/// <summary>
/// Trains the neural network with the passed in training set.
/// Receive a list of tuple, where each tuple represent
/// Tuple = (State, ExpectedValueForBuy, ExpectedValueForSell, ExpectedValueForWait)
/// </summary>
/// <param name="trainingSet">The training set.</param>
public void Train(IList<Tuple<State, double[]>> trainingSet)
{
    // Flatten each (state, expected-values) tuple into an Encog data pair.
    var trainingData = new List<IMLDataPair>();
    foreach (var sample in trainingSet)
    {
        var stateVector = new BasicMLData(sample.Item1.ToArray());
        var expected = new BasicMLData(sample.Item2);
        trainingData.Add(new BasicMLDataPair(stateVector, expected));
    }

    IMLDataSet dataSet = new BasicMLDataSet(trainingData);
    //IMLTrain train = new Backpropagation(NeuralNetwork, dataSet, Parameters.LearningRate, Parameters.LearningMomemtum);
    IMLTrain train = new ResilientPropagation(NeuralNetwork, dataSet);

    // Iterate until converged or the iteration budget is exhausted,
    // raising a progress event after each epoch.
    int epoch = 1;
    while (true)
    {
        train.Iteration();
        epoch++;
        OnTrainingEpochComplete?.Invoke(this, new OnTrainingEpochCompleteArgs()
        {
            Epoch = epoch,
            Error = train.Error
        });
        if (train.Error <= Parameters.TrainingError || epoch >= Parameters.MaxIterationPerTrainging)
        {
            break;
        }
    }

    //foreach (var item in dataSet)
    //{
    //    var output = NeuralNetwork.Compute(item.Input);
    //    Console.WriteLine("output: {0} - {1} - {2} | ideal: {3} - {4} - {5}", output[0], output[1], output[2], item.Ideal[0], item.Ideal[1], item.Ideal[2]);
    //}
}
/// <summary>
/// Loads training data from a CSV file into a DataSet, builds a 17-2 sigmoid
/// network, trains it with resilient propagation to an error of 0.01, and
/// prints the network's output for each training pair.
/// </summary>
private static void Main(string[] args)
{
    CSVReader reader = new CSVReader();
    DataSet ds = reader.ReadCSVFile(FILENAME, true);
    dt = ds.Tables["Table1"];

    // Create a neural network, without using a factory.
    var network = new BasicNetwork();
    network.AddLayer(new BasicLayer(null, true, 17));
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 2));
    network.Structure.FinalizeStructure();
    network.Reset();

    // Create training data from the loaded table.
    Dictionarys dict = new Dictionarys();
    IMLDataSet dataSet = dict.GetDataSet(dt);

    // Train the neural network.
    IMLTrain train = new ResilientPropagation(network, dataSet);
    int epoch = 1;
    while (true)
    {
        train.Iteration();
        Console.WriteLine(@"Epoch #" + epoch + @" Error:" + train.Error);
        epoch++;
        if (train.Error <= 0.01)
        {
            break;
        }
    }
    train.FinishTraining();

    // Test the neural network.
    Console.WriteLine(@"Neural Network Results:");
    foreach (IMLDataPair pair in dataSet)
    {
        IMLData output = network.Compute(pair.Input);
        Console.WriteLine(pair.Input[0] + @"," + pair.Input[1]
            + @", actual=" + output[0] + @",ideal=" + pair.Ideal[0]);
    }

    EncogFramework.Instance.Shutdown();
}
/// <summary>
/// Builds a 2-6-2 sigmoid network, trains it on the class's input/output data
/// with resilient propagation to an error of 0.001, prints each training
/// result, and finally computes the network's output for a sample input.
/// </summary>
public void Run()
{
    // Create the neural network with its layers.
    var network = new BasicNetwork();
    network.AddLayer(new BasicLayer(null, true, 2));
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 6));
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 2));
    network.Structure.FinalizeStructure();
    network.Reset();

    // Build the training set.
    IMLDataSet conjuntoEntrenamiento = new BasicMLDataSet(entradas, salidas);

    // Train until the error drops to 0.001.
    IMLTrain train = new ResilientPropagation(network, conjuntoEntrenamiento);
    int epoch = 1;
    while (true)
    {
        train.Iteration();
        Console.WriteLine("Epoca #" + epoch + " Error:" + train.Error);
        epoch++;
        if (train.Error <= 0.001)
        {
            break;
        }
    }

    // Test the trained network against the training pairs.
    Console.WriteLine("Resultados:");
    foreach (IMLDataPair pair in conjuntoEntrenamiento)
    {
        IMLData output = network.Compute(pair.Input);
        Console.WriteLine(pair.Input[0] + @"," + pair.Input[1]
            + @", actual=" + output[0] + "," + output[1]
            + @",ideal=" + pair.Ideal[0] + "," + pair.Ideal[1]);
    }

    // Evaluate one sample input (result intentionally unused here).
    IMLData dataprueba = new BasicMLData(new double[] { 2.4, 2.5 });
    var prueba = network.Compute(dataprueba);
}
/// <summary>
/// Normalizes the data set to [0, 1], builds a two-hidden-activation sigmoid
/// feed-forward network sized from the input/output column counts, and wraps a
/// resilient-propagation trainer in 5-fold cross validation.
/// </summary>
/// <param name="dataset">The versatile data set to model.</param>
public NeuralNetworkModel(VersatileMLDataSet dataset)
{
    dataset.NormHelper.NormStrategy = new BasicNormalizationStrategy(0, 1, 0, 1);
    dataset.Normalize();

    var inputCount = dataset.NormHelper.InputColumns.Count;
    var outputCount = dataset.NormHelper.OutputColumns.Count;
    // Hidden layer size heuristic: 1.5 * (inputs + outputs).
    var hiddenCount = (inputCount + outputCount) * 1.5;

    var architecture = $"?:B->SIGMOID->{hiddenCount}:B->SIGMOID->?";
    var method = (BasicNetwork)new MLMethodFactory().Create(
        MLMethodFactory.TypeFeedforward, architecture, inputCount, outputCount);

    var folds = new FoldedDataSet(dataset);
    folds.Fold(5);

    _kfoldTrainer = new CrossValidationKFold(new ResilientPropagation(method, folds), 5);
}
/// <summary>
/// Counts how many resilient-propagation training iterations complete within a
/// fixed time budget of 10 * Milis milliseconds. The elapsed time is only
/// checked every 256 iterations to keep the Stopwatch overhead off the hot loop.
/// </summary>
/// <param name="network">The network to train.</param>
/// <param name="training">The training data to use.</param>
/// <returns>The number of iterations performed within the time budget.</returns>
public static int EvaluateTrain(BasicNetwork network, IMLDataSet training)
{
    // train the neural network
    IMLTrain train = new ResilientPropagation(network, training);

    int iterations = 0;
    const int milis10 = Milis * 10;
    var watch = new Stopwatch();
    watch.Start();
    while (true)
    {
        iterations++;
        train.Iteration();
        // BUG FIX: the original tested ElapsedMilliseconds < milis10, which
        // exits on the first check while still INSIDE the budget (after at
        // most 256 iterations). The loop must stop once the budget is spent.
        if ((iterations & 0xff) == 0 && watch.ElapsedMilliseconds >= milis10)
        {
            break;
        }
    }
    return iterations;
}
/// <summary>
/// Loads the persisted network and training data, then trains indefinitely in
/// 10-minute console sessions, persisting the network after each session.
/// </summary>
public static void Run()
{
    var network = (BasicNetwork)EncogDirectoryPersistence.LoadObject(Config.NetworkFile);
    var trainingSet = EncogUtility.LoadEGB2Memory(Config.TrainingFile);

    // Endless train/save cycle: each pass trains for 10 minutes and
    // checkpoints the network to disk.
    for (; ;)
    {
        var train = new ResilientPropagation(network, trainingSet)
        {
            ThreadCount = 0,     // let Encog pick the thread count
            FixFlatSpot = false
        };

        EncogUtility.TrainConsole(
            train, network, trainingSet, TimeSpan.FromMinutes(10).TotalSeconds);

        Console.WriteLine("Finished. Saving network...");
        EncogDirectoryPersistence.SaveObject(Config.NetworkFile, network);
        Console.WriteLine(@"Network saved.");
    }
}