/// <summary>
/// Entry point: loads the training and test CSV data sets, builds a
/// 784-30-10 network, and repeats backpropagation epochs until the MSE
/// drops to 0.005 or below. Each epoch prints its wall-clock time and
/// error; the trained network is finally serialized to the console.
/// </summary>
static async Task Main(string[] args)
{
    var reader = new CsvReader();
    var trainingData = await reader.Read(TrainFile);
    var testData = await reader.Read(TestFile);

    var network = new Network(784, 30, 10);
    var trainer = new BackpropagationAlgorithm(network);

    var epoch = 0;
    double mse;
    do
    {
        epoch++;
        var timer = Stopwatch.StartNew();
        mse = trainer.Train(trainingData, testData);
        timer.Stop();
        Console.WriteLine(timer.ElapsedMilliseconds);
        Console.WriteLine($"Count: {epoch} - MSE: {mse}.");
    } while (mse > 0.005); // keep training until the error threshold is met

    Console.WriteLine(network.Save());
    Console.ReadLine(); // hold the console window open
}
/// <summary>
/// Clears the stop flag, constructs the appropriate trainer — parallel
/// across all logical processors when <c>Multithreading</c> is set,
/// otherwise sequential — and launches the training loop on a
/// dedicated background thread.
/// </summary>
private void startTraining()
{
    stopSignal = false;

    if (Multithreading)
    {
        backpropagationAlgorithm = new ParallelBackpropagation(
            NeuralNet,
            DataItems,
            LearningRate,
            Momentum,
            ErrorThreshold,
            Environment.ProcessorCount);
    }
    else
    {
        backpropagationAlgorithm = new Backpropagation(
            NeuralNet,
            DataItems,
            LearningRate,
            Momentum,
            ErrorThreshold);
    }

    // Method-group conversion is equivalent to new ThreadStart(trainNetwork).
    thread = new Thread(trainNetwork);
    thread.Start();
}
/// <summary>
/// Smoke test for backpropagation: trains a 2-input, 1-output perceptron
/// (three hidden layers of 20) to approximate the normalized sum of two
/// random integers, then prints 100 predictions beside the true sums
/// (press Enter to advance to the next one).
/// </summary>
public static void TestBackpropagation()
{
    var network = NetworkCreator.Perceptron(2, 1, new[] { 20, 20, 20 });

    const int sampleCount = 100000;
    var rng = new Random();
    var range = 1000;  // width of the random interval drawn from
    var offset = 500;  // shifts inputs into [-offset, range - offset)

    // Training pairs: inputs are two integers; the target is their sum
    // scaled into (0, 1) via (sum + 2*offset) / (2 * range).
    // Each key is a fresh List instance, so reference-equality keys never collide.
    var samples = new Dictionary<List<double>, List<double>>();
    for (var i = 0; i < sampleCount; i++)
    {
        var a = rng.Next(range) - offset;
        var b = rng.Next(range) - offset;
        samples.Add(
            new List<double>() { a, b },
            new List<double>() { (a + b + 2 * offset) / 2d / range, });
    }

    BackpropagationAlgorithm.Teach(network, samples);
    network.Print();

    // Spot-check fresh inputs; invert the scaling to recover the predicted sum.
    for (var i = 0; i < 100; i++)
    {
        var a = rng.Next(range) - offset;
        var b = rng.Next(range) - offset;
        network.Work(new List<double>() { a, b });
        var res = network.GetResults()[0];
        Console.WriteLine(a + " " + b + " . " + (a + b) + " " + (Math.Round((res * range - offset) * 2)));
        Console.ReadLine();
    }
}