// Runs network training asynchronously while keeping the form responsive.
// Disables the relevant controls for the duration, then restores them and
// shows the resulting accuracy in the status label.
private async Task<double> train_networkAsync(int training_size, int epoches, double acceptable_error, bool parallel = true)
{
    // Lock down the UI while training is in progress
    label1.Text = "Выполняется обучение...";
    label1.ForeColor = Color.Red;
    groupBox1.Enabled = false;
    pictureBox1.Enabled = false;
    trainOneButton.Enabled = false;

    // Build a fresh training set of the requested size
    SamplesSet samples = new SamplesSet();
    for (int sampleIndex = 0; sampleIndex < training_size; sampleIndex++)
    {
        samples.AddSample(generator.GenerateFigure());
    }

    // Off-load the CPU-bound training to the thread pool so the UI thread stays free
    double accuracy = await Task.Run(() => net.TrainOnDataSet(samples, epoches, acceptable_error, parallel));

    // Restore the UI and report the result
    label1.Text = "Щелкните на картинку для теста нового образа";
    label1.ForeColor = Color.Green;
    groupBox1.Enabled = true;
    pictureBox1.Enabled = true;
    trainOneButton.Enabled = true;
    StatusLabel.Text = "Accuracy: " + accuracy.ToString();
    StatusLabel.ForeColor = Color.Green;

    return accuracy;
}
// Generates a fresh test set and reports the network's accuracy on it.
// The status label turns green when the measured accuracy reaches the
// threshold chosen in AccuracyCounter, red otherwise.
private void button2_Click(object sender, EventArgs e)
{
    this.Enabled = false;

    // Build a new evaluation set of the size chosen in the UI
    SamplesSet samples = new SamplesSet();
    for (int k = 0; k < (int)TrainingSizeCounter.Value; k++)
    {
        samples.AddSample(generator.GenerateFigure());
    }

    double accuracy = net.TestOnDataSet(samples);
    StatusLabel.Text = string.Format("Точность на тестовой выборке : {0,5:F2}%", accuracy * 100);
    StatusLabel.ForeColor = accuracy * 100 >= AccuracyCounter.Value ? Color.Green : Color.Red;

    this.Enabled = true;
}
/// <summary>
/// Trains the network on the given data set.
/// </summary>
/// <param name="samplesSet">Training sample set</param>
/// <param name="epochs_count">Number of passes over the training set</param>
/// <param name="acceptable_erorr">Threshold that stops training early</param>
/// <returns>Fraction of correctly recognized samples on the last pass</returns>
public double TrainOnDataSet(SamplesSet samplesSet, int epochs_count, double acceptable_erorr)
{
    double guessLevel = 0;
    do
    {
        // Count samples recognized correctly on this pass (Train returns 0 on success)
        guessLevel = 0;
        foreach (Sample sample in samplesSet.samples)
        {
            if (Train(sample) == 0)
            {
                guessLevel += 1;
            }
        }
        guessLevel /= samplesSet.samples.Count;

        // NOTE(review): the parameter is named "acceptable error" but is compared
        // against the *accuracy* ratio — confirm the intended semantics with callers.
        if (guessLevel > acceptable_erorr)
        {
            return guessLevel;
        }

        epochs_count--;
    } while (epochs_count > 0);

    return guessLevel;
}
/// <summary>
/// Trains the network with a resilient-backpropagation teacher until the
/// epoch budget is exhausted or the epoch error drops to the acceptable level.
/// </summary>
/// <param name="samplesSet">Training sample set</param>
/// <param name="epochs_count">Maximum number of training epochs</param>
/// <param name="acceptableError">Error threshold that stops training early</param>
/// <param name="parallel">Use the parallel RProp teacher when true</param>
/// <returns>Error value reported by the last epoch</returns>
public override double TrainOnDataSet(SamplesSet samplesSet, int epochs_count, double acceptableError, bool parallel = true)
{
    // Flatten the sample set into the parallel input/output arrays the teacher expects
    double[][] inputs = new double[samplesSet.Count][];
    double[][] outputs = new double[samplesSet.Count][];
    for (int i = 0; i < samplesSet.Count; ++i)
    {
        inputs[i] = samplesSet[i].input;
        outputs[i] = samplesSet[i].output;
    }

    int epoch_to_run = 0;

    // Pick the parallel or sequential RProp teacher
    ISupervisedLearning teacher;
    if (parallel)
    {
        teacher = new ParallelResilientBackpropagationLearning(network);
    }
    else
    {
        teacher = new ResilientBackpropagationLearning(network);
    }

    double error = double.PositiveInfinity;

#if DEBUG
    // FIX: wrap the log writer in using so it is disposed even if RunEpoch
    // throws — the original only closed it on the happy path, leaking the handle
    using (StreamWriter errorsFile = File.CreateText("errors.csv"))
#endif
    {
        stopWatch.Restart();
        while (epoch_to_run < epochs_count && error > acceptableError)
        {
            epoch_to_run++;
            error = teacher.RunEpoch(inputs, outputs);
#if DEBUG
            errorsFile.WriteLine(error);
#endif
            // Progress callback: fraction of epochs done, current error, elapsed time
            updateDelegate((epoch_to_run * 1.0) / epochs_count, error, stopWatch.Elapsed);
        }
    }

    updateDelegate(1.0, error, stopWatch.Elapsed);
    stopWatch.Stop();
    return error;
}
/// <summary>
/// Runs every sample through the network and returns the fraction recognized correctly.
/// </summary>
/// <param name="testSet">Samples to classify; each sample's output is overwritten</param>
/// <returns>Accuracy in [0, 1] (NaN for an empty set, same as the original 0/0)</returns>
public override double TestOnDataSet(SamplesSet testSet)
{
    double hits = 0.0;
    for (var idx = 0; idx < testSet.Count; ++idx)
    {
        // Forward pass, then map the raw output vector to a recognized class
        testSet[idx].output = network.Compute(testSet[idx].input);
        testSet[idx].ProcessOutput();
        if (testSet[idx].actualClass == testSet[idx].recognizedClass)
        {
            hits += 1;
        }
    }
    return hits / testSet.Count;
}
/// <summary>
/// Loads the on-disk image set and converts each image into a sample.
/// File name is (i + j) + ".jpg"; the figure class is derived as j / 300.
/// </summary>
/// <param name="path">Directory path (with trailing separator) containing the images</param>
/// <returns>The populated sample set</returns>
public SamplesSet ConvertSet(string path)
{
    SamplesSet set = new SamplesSet();
    for (int i = 0; i < 300; i++)
    {
        for (int j = 0; j < 3000; j += 300)
        {
            // There is no "0.jpg"
            if (i == 0 && j == 0)
            {
                continue;
            }
            // FIX: dispose each Bitmap — the original leaked a GDI+ handle per
            // image (~3000 of them). Assumes Convert copies the pixel data it
            // needs and keeps no reference to the bitmap — TODO confirm.
            using (Bitmap cur_img = new Bitmap(path + (i + j).ToString() + ".jpg"))
            {
                set.AddSample(Convert(cur_img, (FigureType)(j / 300)));
            }
        }
    }
    return set;
}
/// <summary>
/// Measures prediction accuracy over the given test set.
/// </summary>
/// <param name="testSet">Samples to predict on</param>
/// <returns>Fraction of correct predictions; NaN when the set is empty</returns>
public double TestOnDataSet(SamplesSet testSet)
{
    // Accuracy is undefined for an empty set
    if (testSet.Count == 0)
    {
        return double.NaN;
    }

    double correctCount = 0;
    foreach (Sample sample in testSet.samples)
    {
        predict(sample);
        if (sample.Correct())
        {
            correctCount += 1;
        }
    }
    return correctCount / testSet.Count;
}
/// <summary>
/// Trains on the whole sample set for a fixed number of epochs.
/// This learner tracks no error metric, so the reported error is always 0.0.
/// </summary>
/// <param name="samplesSet">Training sample set</param>
/// <param name="epochsCount">Number of full passes over the set</param>
/// <param name="acceptableError">Unused: no error metric is computed</param>
/// <param name="parallel">Unused by this implementation</param>
/// <returns>Always 0.0</returns>
public override double TrainOnDataSet(SamplesSet samplesSet, int epochsCount, double acceptableError, bool parallel = true)
{
    var start = DateTime.Now;
    for (var i = 0; i < epochsCount; i++)
    {
        foreach (var sample in samplesSet.samples)
        {
            Train(sample);
        }
        // FIX: log the epoch number once per epoch — the original wrote it to
        // the console for every single sample, which dominates training time
        Console.WriteLine(i);
        updateDelegate(
            (i + 1.0) / epochsCount,
            0.0,
            DateTime.Now - start
            );
    }
    return 0.0;
}
/// <summary>
/// Trains until the per-epoch accuracy exceeds the threshold or the epoch
/// budget runs out.
/// </summary>
/// <param name="samplesSet">Training sample set</param>
/// <param name="epochs_count">Maximum number of epochs</param>
/// <param name="acceptable_erorr">Accuracy threshold that stops training early</param>
/// <param name="parallel">Unused by this implementation</param>
/// <returns>Fraction of correctly recognized samples on the last epoch</returns>
public override double TrainOnDataSet(SamplesSet samplesSet, int epochs_count, double acceptable_erorr, bool parallel = true)
{
    var guessLevel = 0.0;
    while (epochs_count > 0)
    {
        // FIX: reset the counter at the start of each epoch — the original kept
        // accumulating on top of the previous epoch's already-normalized ratio,
        // so the "accuracy" drifted upward and the early-exit fired spuriously.
        // (The sibling non-override TrainOnDataSet resets it correctly.)
        guessLevel = 0.0;
        foreach (var sample in samplesSet)
        {
            // Train returns 0 when the sample is recognized correctly
            if (Train((Sample)sample) == 0)
            {
                guessLevel += 1;
            }
        }
        guessLevel /= samplesSet.samples.Count;
        if (guessLevel > acceptable_erorr)
        {
            return guessLevel;
        }
        epochs_count--;
    }
    return guessLevel;
}
/// <summary>
/// Evaluates the network on the given test set.
/// </summary>
/// <param name="testSet">Samples to classify.</param>
/// <returns>Fraction of correctly recognized samples (implementation-defined for an empty set).</returns>
public abstract double TestOnDataSet(SamplesSet testSet);
/// <summary>
/// Trains the network on the given data set.
/// </summary>
/// <param name="samplesSet">Training sample set.</param>
/// <param name="epochs_count">Maximum number of passes over the training set.</param>
/// <param name="acceptableError">Threshold that allows stopping before the epoch budget is spent.</param>
/// <param name="parallel">Hint to use a parallel implementation where one exists.</param>
/// <returns>Final training metric; implementations return either an error value or an accuracy ratio — check the concrete class.</returns>
public abstract double TrainOnDataSet(SamplesSet samplesSet, int epochs_count, double acceptableError, bool parallel = true);
/// <summary>
/// Hand-rolled backpropagation with momentum over the network's jagged weight
/// arrays. Trains until the epoch budget is exhausted or the error drops to the
/// acceptable threshold; each inner block has a Parallel.For and a sequential twin.
/// </summary>
/// <param name="samplesSet">Training sample set.</param>
/// <param name="epochs_count">Maximum number of epochs.</param>
/// <param name="acceptable_erorr">Error threshold that stops training early.</param>
/// <param name="parallel">Use Parallel.For for the per-neuron loops when true.</param>
/// <returns>Error value after the last completed epoch.</returns>
public override double TrainOnDataSet(SamplesSet samplesSet, int epochs_count, double acceptable_erorr, bool parallel = true) {
    int epoch_to_run = 0;
    // Momentum coefficient applied to the previous weight delta
    double alpha = 0.15;
    // delta_weights mirrors network.weights and stores each weight's previous
    // update, which the momentum term below feeds back in
    double[][][] delta_weights;
    delta_weights = new double[network.weights.Length][][];
    if (parallel) {
        System.Threading.Tasks.Parallel.For(0, network.weights.Length, (i) => {
            delta_weights[i] = new double[network.weights[i].Length][];
            for (int j = 0; j < network.weights[i].Length; j++) {
                delta_weights[i][j] = new double[network.weights[i][j].Length];
            }
        });
    } else {
        for (int i = 0; i < network.weights.Length; i++) {
            delta_weights[i] = new double[network.weights[i].Length][];
            for (int j = 0; j < network.weights[i].Length; j++) {
                delta_weights[i][j] = new double[network.weights[i][j].Length];
            }
        }
    }
    stopWatch.Restart();
    double error = double.PositiveInfinity;
    while (epoch_to_run < epochs_count && error > acceptable_erorr) {
        epoch_to_run++;
        for (int u = 0; u < samplesSet.Count; u++) {
            // Forward pass; the sample records its own raw output
            samplesSet[u].output = network.Compute(samplesSet[u].input);
            samplesSet[u].processOutput();
            double[] deltas = new double[network.outputs.Length];
            // Backward pass: output-layer deltas. der is the sigmoid derivative
            // out*(1-out); the target is one-hot on the sample's actualClass.
            if (parallel) {
                System.Threading.Tasks.Parallel.For(0, network.outputs.Length, (i) => {
                    double der = (1 - network.outputs[i]) * network.outputs[i];
                    deltas[i] = ((i == (int)samplesSet[u].actualClass ? 1 : 0) - network.outputs[i]) * der;
                });
            } else {
                for (int i = 0; i < network.outputs.Length; i++) {
                    double der = (1 - network.outputs[i]) * network.outputs[i];
                    deltas[i] = ((i == (int)samplesSet[u].actualClass ? 1 : 0) - network.outputs[i]) * der;
                }
            }
            // Hidden layers, walking backwards; weights are updated in place
            // (with momentum) while the deltas are propagated one layer down
            for (int i = network.weights.Length - 1; i >= 1; i--) {
                double[] nextdeltas = new double[network.weights[i][0].Length];
                if (parallel) {
                    System.Threading.Tasks.Parallel.For(0, network.weights[i][0].Length, (j) => {
                        double der = (1 - network.layersValue[i - 1][j]) * network.layersValue[i - 1][j];
                        double sum = 0;
                        for (int k = 0; k < deltas.Length; k++) {
                            sum += deltas[k] * network.weights[i][k][j];
                        }
                        nextdeltas[j] = der * sum;
                        for (int k = 0; k < deltas.Length; k++) {
                            delta_weights[i][k][j] = network.learningRate * deltas[k] * network.layersValue[i - 1][j] + alpha * delta_weights[i][k][j];
                            network.weights[i][k][j] += delta_weights[i][k][j];
                        }
                    });
                } else {
                    for (int j = 0; j < network.weights[i][0].Length; j++) {
                        double der = (1 - network.layersValue[i - 1][j]) * network.layersValue[i - 1][j];
                        double sum = 0;
                        for (int k = 0; k < deltas.Length; k++) {
                            sum += deltas[k] * network.weights[i][k][j];
                        }
                        nextdeltas[j] = der * sum;
                        for (int k = 0; k < deltas.Length; k++) {
                            delta_weights[i][k][j] = network.learningRate * deltas[k] * network.layersValue[i - 1][j] + alpha * delta_weights[i][k][j];
                            network.weights[i][k][j] += delta_weights[i][k][j];
                        }
                    }
                }
                // The last element of nextdeltas is dropped before descending a
                // layer — presumably it corresponds to a bias neuron that has no
                // incoming weights; TODO confirm against the network layout
                deltas = new double[nextdeltas.Length - 1];
                for (int j = 0; j < nextdeltas.Length - 1; j++) {
                    deltas[j] = nextdeltas[j];
                }
            }
            // Input layer: update the first weight matrix straight from the raw inputs
            if (parallel) {
                System.Threading.Tasks.Parallel.For(0, network.weights[0][0].Length, (j) => {
                    for (int k = 0; k < deltas.Length; k++) {
                        delta_weights[0][k][j] = network.learningRate * deltas[k] * network.inputs[j] + alpha * delta_weights[0][k][j];
                        network.weights[0][k][j] += delta_weights[0][k][j];
                    }
                });
            } else {
                for (int j = 0; j < network.weights[0][0].Length; j++) {
                    for (int k = 0; k < deltas.Length; k++) {
                        delta_weights[0][k][j] = network.learningRate * deltas[k] * network.inputs[j] + alpha * delta_weights[0][k][j];
                        network.weights[0][k][j] += delta_weights[0][k][j];
                    }
                }
            }
        }
        // NOTE(review): the stopping error is taken from the LAST sample only,
        // not averaged over the epoch — confirm this is intentional
        error = samplesSet[samplesSet.Count - 1].EstimatedError();
        updateDelegate((epoch_to_run * 1.0) / epochs_count, error, stopWatch.Elapsed);
    }
    updateDelegate(1.0, error, stopWatch.Elapsed);
    stopWatch.Stop();
    return error;
}
/// <summary>
/// Testing is not supported by this implementation.
/// </summary>
/// <param name="testSet">Ignored.</param>
/// <exception cref="NotImplementedException">Always thrown.</exception>
public override double TestOnDataSet(SamplesSet testSet) { throw new NotImplementedException(); }
// Rebuilds the cached sample set (ss) from the on-disk image folder.
private void button2_Click(object sender, EventArgs e)
{
    var converter = new SamplerConverter();
    ss = converter.ConvertSet(@"..\..\NewImages\");
}