// Entry point: builds a 10-input network (230-150-120-1), trains it with
// RProp for 10 iterations and prints the resulting learning statistic.
static void Main(string[] args)
{
    var service = new ServiceEvaNN();

    var structure = new NetworkStructure
    {
        InputVectorLength = 10,
        NeuronsByLayers = new[] { 230, 150, 120, 1 }
    };

    var config = new TrainingConfiguration
    {
        TrainingAlgorithmType = TrainingAlgorithmType.RProp,
        StartIteration = 0,
        EndIteration = 10,
        InputDatasetFilename = "TrainingSets//inputSets.txt",
        OutputDatasetFilename = "TrainingSets//outputSets.txt",
        MemoryFolder = "Memory"
    };

    if (service.CreateNetwork(config.MemoryFolder, structure))
    {
        service.Train(config, true, ProcessPriorityClass.Normal, true);
        service.CalculateStatistic(config);
    }

    Console.WriteLine("Done!");
    Console.ReadKey();
}
/// <summary>
/// Training FeedForward - NeuralNetwork.
/// Validates the network exists, optionally warns about unsafe mode, then runs
/// the teacher's training loop and reports the elapsed time.
/// </summary>
/// <param name="trainingConfiguration">Iteration range, dataset paths and memory folder.</param>
/// <param name="printLearnStatistic">When true, prints/logs the learning statistic after training.</param>
/// <param name="processPriorityClass">Priority assigned to the current process for the run.</param>
/// <param name="unsafeTrainingMode">Passed through to the teacher; a warning is logged when true.</param>
/// <param name="iterationsToPause">Iterations per training part; -1 means "the whole range at once".</param>
public void Train(TrainingConfiguration trainingConfiguration, bool printLearnStatistic = false,
                  ProcessPriorityClass processPriorityClass = ProcessPriorityClass.Normal,
                  bool unsafeTrainingMode = false, int iterationsToPause = -1)
{
    // Check for unexistent network:
    if (_networkTeacher == null)
    {
        Logger.LogError(ErrorType.OperationWithNonexistentNetwork, "Training failed!");
        return;
    }

    // Check for set of iterations to pause:
    if (iterationsToPause == -1)
    {
        iterationsToPause = trainingConfiguration.EndIteration - trainingConfiguration.StartIteration;
    }

    // Print warning about using unsafe mode:
    if (unsafeTrainingMode)
    {
        Logger.LogWarning(WarningType.UsingUnsafeTrainingMode);
    }

    // BUGFIX: the original only checked for "" — a null MemoryFolder fell through
    // and would fail later in CheckMemory. Treat null and "" the same way.
    if (string.IsNullOrEmpty(trainingConfiguration.MemoryFolder))
    {
        trainingConfiguration.MemoryFolder = "Memory";
    }

    // Start process timer:
    Stopwatch stopWatch = Stopwatch.StartNew();

    // Set the process priority class:
    Process thisProc = Process.GetCurrentProcess();
    thisProc.PriorityClass = processPriorityClass;

    if (_networkTeacher.CheckMemory(trainingConfiguration.MemoryFolder)
        && _networkTeacher.CheckDatasets(trainingConfiguration.InputDatasetFilename,
                                         trainingConfiguration.OutputDatasetFilename,
                                         _networkStructure))
    {
        _networkTeacher.TrainNet(trainingConfiguration, iterationsToPause, unsafeTrainingMode);

        // Stopping timer and print spend time in [HH:MM:SS]:
        stopWatch.Stop();
        TimeSpan ts = stopWatch.Elapsed;
        string elapsedTime = String.Format("{0:00}:{1:00}:{2:00}", ts.Hours, ts.Minutes, ts.Seconds);
        Console.WriteLine("Time spend: " + elapsedTime);

        if (printLearnStatistic)
        {
            _networkTeacher.PrintLearningStatistic(trainingConfiguration, true, elapsedTime);
        }
    }
    else
    {
        stopWatch.Stop();
        Console.WriteLine("Training failed!");
    }
}
/// <summary>
/// Computes and prints the learning statistic for the current network,
/// delegating to the teacher with logging enabled.
/// </summary>
/// <param name="trainingConfig">Configuration pointing at the datasets to test against.</param>
public void CalculateStatistic(TrainingConfiguration trainingConfig)
{
    // Guard: nothing to measure when no network has been created yet.
    if (_networkTeacher == null)
    {
        Logger.LogError(ErrorType.OperationWithNonexistentNetwork, "Calculate statistic failed!");
        return;
    }

    _networkTeacher.PrintLearningStatistic(trainingConfig, true);
}
/// <summary>
/// Refreshes the list and category views after a merge operation.
/// </summary>
private void RefreshAfterMerge()
{
    // Force the next query to wait for non-stale data so the rebuilt list
    // reflects the merge.
    DataAccess.Configure("WaitForNonStaleData", true);
    try
    {
        RebuildList(true);
    }
    finally
    {
        // BUGFIX: restore the flag even if RebuildList throws — the original
        // left "WaitForNonStaleData" permanently enabled on failure.
        DataAccess.Configure("WaitForNonStaleData", false);
    }

    // Invalidate the cached categories and notify listeners/dependent views.
    _availableCategories = null;
    Notify(() => AvailableCategories);
    TrainingConfiguration.RefreshCategories();
    SearchPanel.RefreshCategories();
}
//[InlineData(10000, 0.0000351085)]
public void When_Then(int iterationsCount, double expectedError)
{
    // Arrange: a preconfigured net and a single-sample training set.
    var network = NeuralNetTestSetup.GetConfiguredNet();
    var samples = new TrainingSet(new[]
    {
        new TrainingSample(new[] { 0.05, 0.10 }, new[] { 0.01, 0.99 })
    });
    _trainingCfg = new TrainingConfiguration
    {
        LearningRate = 0.5,
        MaxEpoch = iterationsCount
    };

    // Act: train (discarding per-epoch callbacks) and measure the remaining error.
    Backpropagation.Train(network, _trainingCfg, samples, _ => { });
    var actualError = ErrorFunctions.TotalError(network, samples);

    // Assert:
    actualError.Should().BeApproximately(expectedError, Precision);
}
public void When_Training_Iteration_Performed_Then_TotalError_Decreases()
{
    // Arrange: a fresh 2-2-1 sigmoid network on the XOR problem.
    var network = new NeuralNetwork(2, 2, 1, ActivationFunctions.Sigmoid);
    var problem = PopularProblems.Xor;
    var config = new TrainingConfiguration { LearningRate = 0.1, MaxEpoch = 10000 };

    // Act: collect the error value reported after every epoch.
    var errors = new List<double>();
    Backpropagation.Train(network, config, problem, errors.Add);

    // Assert: the recorded errors must decrease monotonically.
    errors.Should().BeInDescendingOrder();
}
/// <summary>
/// Saves the training results (pass/fail counts, timing and environment info)
/// to a log file named after the training run's end iteration.
/// </summary>
/// <param name="testPassed">Number of test vectors the network answered correctly.</param>
/// <param name="testFailed">Number of test vectors the network answered incorrectly.</param>
/// <param name="trainConfig">Configuration of the finished training run.</param>
/// <param name="elapsedTime">Formatted elapsed time; pass "" to omit timing info.</param>
public static void LogTrainingResults(int testPassed, int testFailed, TrainingConfiguration trainConfig, string elapsedTime)
{
    // Check for existing this logs-directory:
    if (!Directory.Exists(TrainLogsDirectoryName))
    {
        Directory.CreateDirectory(TrainLogsDirectoryName);
    }

    // BUGFIX: guard against a 0/0 division (logged "NaN") when no tests ran.
    int totalTests = testPassed + testFailed;
    double percentLearned = totalTests == 0 ? 0.0 : (double)testPassed * 100 / totalTests;

    // Save log:
    using (StreamWriter fileWriter = new StreamWriter(TrainLogsDirectoryName + "/" + trainConfig.EndIteration + ".txt"))
    {
        fileWriter.WriteLine("Test passed: " + testPassed);
        fileWriter.WriteLine("Test failed: " + testFailed);
        fileWriter.WriteLine("Percent learned: {0:f2}", percentLearned);

        // Writing additional training data:
        fileWriter.WriteLine("\n============================\n");

        // Writing elapsed time:
        if (elapsedTime != "")
        {
            fileWriter.WriteLine("Time spend: " + elapsedTime);
            fileWriter.WriteLine("Start iteration: " + trainConfig.StartIteration);
            fileWriter.WriteLine("End iteration: " + trainConfig.EndIteration);
            fileWriter.WriteLine("============================\n");
        }

        // Writing system characteristics:
        fileWriter.WriteLine("System characteristics:");
        fileWriter.WriteLine("OS Version: {0} {1}", Environment.OSVersion,
            Environment.Is64BitOperatingSystem ? "64 bit" : "32 bit");
        fileWriter.WriteLine("Processors count: " + Environment.ProcessorCount);

        // BUGFIX: ManagementClass is IDisposable — the original leaked it.
        // NOTE: WMI (Win32_Processor) is Windows-only.
        using (ManagementClass myManagementClass = new ManagementClass("Win32_Processor"))
        {
            PropertyDataCollection myProperties = myManagementClass.Properties;

            // Only log the CPU name and current clock speed; "-" when a value is missing.
            foreach (var myProperty in myProperties)
            {
                switch (myProperty.Name)
                {
                    case "CurrentClockSpeed":
                    case "Name":
                        fileWriter.WriteLine($"{myProperty.Name}: { myProperty.Value ?? "-"}");
                        break;
                }
            }
        }
    }

    Console.WriteLine("Learn statistic logs saved in {0}!", TrainLogsDirectoryName);
}
public void TestTrainingConfiguration()
{
    // Arrange & Act: build a fully populated configuration object.
    var config = new TrainingConfiguration
    {
        StartIteration = 0,
        EndIteration = 1,
        InputDatasetFilename = "test",
        OutputDatasetFilename = "test",
        MemoryFolder = "test",
        TrainingAlgorithmType = TrainingAlgorithmType.BProp
    };

    // Assert: construction with initializers must succeed.
    Assert.IsNotNull(config);
}
/// <summary>
/// Splits the configured iteration range into sub-configurations of at most
/// <paramref name="iterationsToPause"/> iterations each; the final window may
/// be shorter and always ends at EndIteration.
/// </summary>
/// <param name="trainingConfig">Source configuration whose range is split.</param>
/// <param name="iterationsToPause">Maximum iterations per sub-configuration.</param>
/// <returns>The ordered list of sub-configurations covering the full range.</returns>
private List<TrainingConfiguration> InitializeTrainingSubConfigs(TrainingConfiguration trainingConfig, int iterationsToPause)
{
    var subConfigs = new List<TrainingConfiguration>();

    // Local factory: copies the shared fields, varying only the iteration window.
    TrainingConfiguration CreateSubConfig(int start, int end) => new TrainingConfiguration
    {
        TrainingAlgorithmType = trainingConfig.TrainingAlgorithmType,
        StartIteration = start,
        EndIteration = end,
        MemoryFolder = trainingConfig.MemoryFolder,
        InputDatasetFilename = trainingConfig.InputDatasetFilename,
        OutputDatasetFilename = trainingConfig.OutputDatasetFilename
    };

    int position = trainingConfig.StartIteration;

    // Emit full-size windows while more than iterationsToPause iterations remain...
    while (trainingConfig.EndIteration - position - 1 >= iterationsToPause)
    {
        subConfigs.Add(CreateSubConfig(position, position + iterationsToPause));
        position += iterationsToPause;
    }

    // ...then one final (possibly shorter) window up to EndIteration.
    subConfigs.Add(CreateSubConfig(position, trainingConfig.EndIteration));

    Console.WriteLine("Train sub-configuration objects created!");

    return subConfigs;
}
// Demo entry point: trains a 2-2-1 sigmoid network on XOR while streaming the
// per-epoch error to a live chart window.
public static void Main()
{
    var network = new NeuralNetwork(2, 2, 1, ActivationFunctions.Sigmoid);
    var config = new TrainingConfiguration { LearningRate = 0.1, MaxEpoch = 512000 };
    var problem = PopularProblems.Xor;

    var chartWindow = SimpleChart.LaunchInNewThread();
    chartWindow.Title = "Learning progress";

    // Push each reported error onto the chart without blocking the training loop.
    Backpropagation.Train(network, config, problem, x => Task.Run(() => chartWindow.AddPoint(x)));

    Console.WriteLine("Press any key to continue...");
    Console.ReadKey();
}
/// <summary>
/// Writes the two-input AND truth-table dataset files (outputs 0,0,0,1) to the
/// paths named by the configuration.
/// </summary>
/// <param name="trainingConfiguration">Supplies the input/output dataset filenames.</param>
private void CreateDataSets(TrainingConfiguration trainingConfiguration)
{
    // Input vectors — all four two-bit combinations (no trailing newline on the last row):
    using (var inputWriter = new StreamWriter(trainingConfiguration.InputDatasetFilename))
    {
        inputWriter.WriteLine("0 0");
        inputWriter.WriteLine("0 1");
        inputWriter.WriteLine("1 0");
        inputWriter.Write("1 1");
    }

    // Expected outputs — 1 only for the (1,1) input:
    using (var outputWriter = new StreamWriter(trainingConfiguration.OutputDatasetFilename))
    {
        outputWriter.WriteLine("0");
        outputWriter.WriteLine("0");
        outputWriter.WriteLine("0");
        outputWriter.Write("1");
    }
}
/// <summary>
/// Trains the network. (Original summary was Russian: "Обучение сети".)
/// Splits the configured iteration range into sub-configurations of at most
/// <paramref name="iterationsToPause"/> iterations, trains each part on its own
/// thread, and tests after every part.
/// </summary>
/// <param name="trainingConfig">Iteration range, dataset paths and algorithm type.</param>
/// <param name="iterationsToPause">Number of iterations per training sub-part.</param>
/// <param name="unsafeTrainingMode">When true, the sub-teacher's safe training mode is disabled.</param>
public void TrainNet(TrainingConfiguration trainingConfig, int iterationsToPause, bool unsafeTrainingMode = false)
{
    Iteration = trainingConfig.EndIteration;

    #region Load data from file

    List<double[]> inputDataSets;
    List<double[]> outputDataSets;

    // Missing/unreadable dataset files abort training entirely.
    try
    {
        inputDataSets = FileManager.LoadTrainingDataset(trainingConfig.InputDatasetFilename);
        outputDataSets = FileManager.LoadTrainingDataset(trainingConfig.OutputDatasetFilename);
    }
    catch (Exception ex)
    {
        Logger.LogError(ErrorType.SetMissing, ex);
        return;
    }

    #endregion

    Console.WriteLine("Training start...");

    try
    {
        List<TrainingConfiguration> trainingConfigs = InitializeTrainingSubConfigs(trainingConfig, iterationsToPause);

        // Initialize teachers:
        // One teacher instance is reused; only its TrainingConfiguration changes per part.
        SingleNetworkTeacher netSubTeacher = new SingleNetworkTeacher
        {
            Network = _net,
            NetworkStructure = _networkStructure,
            TrainingConfiguration = trainingConfig,
            InputDatasets = inputDataSets.ToArray(),
            OutputDatasets = outputDataSets.ToArray(),
            SafeTrainingMode = !unsafeTrainingMode
        };

        // Iteration multithreading train:
        // NOTE(review): each part runs on a new Thread and Wait(thread) appears to
        // block until it finishes, making parts sequential — confirm Wait's semantics.
        for (int j = 0; j < trainingConfigs.Count; j++)
        {
            netSubTeacher.TrainingConfiguration = trainingConfigs[j];

            Thread thread = new Thread(netSubTeacher.Train);
            thread.Start();
            Wait(thread);

            // A failed part aborts the whole run without reloading memory.
            if (!netSubTeacher.LastTrainingSuccess)
            {
                return;
            }

            if (j != trainingConfigs.Count - 1)
            {
                Console.WriteLine("Iterations already finished: " + iterationsToPause * (j + 1));
            }
            else
            {
                Console.WriteLine("Iterations already finished: " + trainingConfig.EndIteration);
            }

            // Test after this iteration's part
            CommonTest(true);
        }

        // Final operations after the model has been trained (translated from Russian):
        switch (trainingConfig.TrainingAlgorithmType)
        {
            // For the genetic algorithm (and RProp): reload the just-saved memory
            // from file (translated from Russian).
            case TrainingAlgorithmType.GeneticAlg:
            case TrainingAlgorithmType.RProp:
                _net = new NeuralNetwork(_networkStructure.NeuronsByLayers, "memory.txt", _networkStructure.Alpha);
                break;
            // In the general case: take the trained network data back from the
            // sub-teacher (translated from Russian).
            case TrainingAlgorithmType.BProp:
            default:
                _net = netSubTeacher.Network;
                break;
        }

        Console.WriteLine("Training success!");
    }
    catch (Exception ex)
    {
        Logger.LogError(ErrorType.TrainError, ex);
    }
}
/// <summary>
/// Printing network's learning statistic: reloads the network memory from file,
/// runs every dataset vector through the net, and reports pass/fail counts.
/// </summary>
/// <param name="trainingConfig">Supplies the input/output dataset filenames.</param>
/// <param name="withLogging">When true, results are also written via Logger.LogTrainingResults.</param>
/// <param name="elapsedTime">Optional elapsed-time string forwarded to the log.</param>
public void PrintLearningStatistic(TrainingConfiguration trainingConfig, bool withLogging = false, string elapsedTime = "")
{
    Console.WriteLine("Start calculating statistic...");

    // Loading memory:
    // NOTE(review): on load failure this only logs and continues with whatever
    // _net currently holds — confirm that fallback is intended.
    try
    {
        _net = new NeuralNetwork(_networkStructure.NeuronsByLayers, "memory.txt", _networkStructure.Alpha);
    }
    catch (Exception ex)
    {
        Logger.LogError(ErrorType.MemoryInitializeError, ex);
    }

    // Testing:
    int testPassed = 0;
    int testFailed = 0;

    #region Load data from file

    List<double[]> inputDataSets;
    List<double[]> outputDataSets;

    try
    {
        inputDataSets = FileManager.LoadTrainingDataset(trainingConfig.InputDatasetFilename);
        outputDataSets = FileManager.LoadTrainingDataset(trainingConfig.OutputDatasetFilename);
    }
    catch (Exception ex)
    {
        Logger.LogError(ErrorType.SetMissing, ex);
        return;
    }

    #endregion

    for (int i = 0; i < inputDataSets.Count; i++)
    {
        // Get the network's answer for this input vector (original comment was Russian):
        string handlingErrorText = "";
        double[] netResult = _net.Handle(inputDataSets[i], ref handlingErrorText);

        if (netResult != null)
        {
            // Counts as passed when the output is "roughly equal" to the expected
            // vector with tolerance 0.3 (see IsVectorsRoughlyEquals).
            if (IsVectorsRoughlyEquals(outputDataSets[i], netResult, 0.3))
            {
                testPassed++;
            }
            else
            {
                testFailed++;
            }
        }
        else
        {
            // A null result signals an input-length mismatch; abort the statistic run.
            Logger.LogError(ErrorType.NonEqualsInputLengths, handlingErrorText);
            return;
        }
    }

    // Logging (optional):
    if (withLogging)
    {
        Logger.LogTrainingResults(testPassed, testFailed, trainingConfig, elapsedTime);
    }

    Console.WriteLine("Test passed: {0}\nTest failed: {1}\nPercent learned: {2:f2}",
        testPassed, testFailed, (double)testPassed * 100 / (testPassed + testFailed));
}
public void TestRProp()
{
    // Arrange: a 2-input network (23-15-1) trained with RProp on the
    // two-input AND dataset produced by CreateDataSets.
    var service = new ServiceEvaNN();
    var structure = new NetworkStructure
    {
        InputVectorLength = 2,
        NeuronsByLayers = new[] { 23, 15, 1 }
    };
    var config = new TrainingConfiguration
    {
        TrainingAlgorithmType = TrainingAlgorithmType.RProp,
        StartIteration = 0,
        EndIteration = 100,
        InputDatasetFilename = "TrainingSetsTest//inputSets.txt",
        OutputDatasetFilename = "TrainingSetsTest//outputSets.txt",
        MemoryFolder = "MemoryRProp"
    };

    // Create the dataset files on first run:
    if (!Directory.Exists("TrainingSetsTest"))
    {
        Directory.CreateDirectory("TrainingSetsTest");
        CreateDataSets(config);
    }

    // Start from a clean memory file so the test is not influenced by prior runs:
    if (File.Exists(config.MemoryFolder + "//memory.txt"))
    {
        File.Delete(config.MemoryFolder + "//memory.txt");
    }

    // Act: create and train the network.
    if (service.CreateNetwork(config.MemoryFolder, structure))
    {
        service.Train(config, true, ProcessPriorityClass.Normal, true);
    }

    // Assert: each truth-table row must be reproduced within tolerance.
    double tolerance = 0.15;

    double[] result0 = service.Handle(new double[] { 0, 0 });
    Assert.IsNotNull(result0);
    Assert.AreEqual(0.0, result0[0], tolerance);

    double[] result1 = service.Handle(new double[] { 0, 1 });
    Assert.IsNotNull(result1);
    Assert.AreEqual(0.0, result1[0], tolerance);

    double[] result2 = service.Handle(new double[] { 1, 0 });
    Assert.IsNotNull(result2);
    Assert.AreEqual(0.0, result2[0], tolerance);

    double[] result3 = service.Handle(new double[] { 1, 1 });
    Assert.IsNotNull(result3);
    Assert.AreEqual(1.0, result3[0], tolerance);
}