private void button4_Click(object sender, EventArgs e)
{
    // Lazily generate a synthetic one-hot identity data set the first time the button is used.
    if (trainingData.Count == 0)
    {
        var random = new Random();
        for (int sample = 0; sample < 10000; sample++)
        {
            float[] input = new float[5];
            float[] output = new float[5];
            // Pick a class in [0, 4]; Min() guards the theoretical upper edge of the scaled random value.
            float scaled = (float)random.NextDouble() * 5;
            int hotIndex = Math.Min(4, (int)Math.Floor(scaled));
            input[hotIndex] = 1.0f;
            output[hotIndex] = 1.0f;
            trainingData.Add(new TrainingSuite.TrainingData(input, output));
        }
    }

    // Configure the run: small fixed mini-batches, deterministic sample order, no regularization.
    var trainingSuite = new TrainingSuite(trainingData);
    trainingSuite.config.epochs = (int)numericUpDown6.Value;
    trainingSuite.config.miniBatchSize = 6;
    trainingSuite.config.shuffleTrainingData = false;
    trainingSuite.config.regularization = TrainingSuite.TrainingConfig.Regularization.None;
    trainingSuite.config.costFunction = new CrossEntropyErrorFunction();

    // Kick off asynchronous training and start polling progress from the UI timer.
    trainingPromise = solver.Train(trainingSuite, calculator);
    progressBar1.Value = 0;
    label2.Text = "Training...";
    trainingBegin = DateTime.Now;
    timer1.Start();
}
/// <summary>
/// Trains the target network on a synthetic sine-regression data set
/// (y = sin((x - 0.5) * 4) remapped into [0, 1]) while printing a 20-step
/// ASCII progress bar to the console.
/// </summary>
/// <param name="selectedDevice">Compute device to train on.</param>
/// <param name="epochs">Number of training epochs to run.</param>
private static void Train(ComputeDevice selectedDevice, int epochs)
{
    // Build the synthetic regression data set.
    List<TrainingSuite.TrainingData> trainingData = new List<TrainingSuite.TrainingData>();
    for (int i = 0; i < 10000; i++)
    {
        float[] input = new float[1];
        float[] desiredOutput = new float[1];
        float rnd = (float)random.NextDouble();
        input[0] = rnd;
        desiredOutput[0] = (float)Math.Sin((rnd - 0.5f) * 4.0f) * 0.5f + 0.5f;
        trainingData.Add(new TrainingSuite.TrainingData(input, desiredOutput));
    }

    TrainingSuite suite = new TrainingSuite(trainingData);
    suite.config.epochs = epochs;
    suite.config.shuffleTrainingData = true;
    suite.config.miniBatchSize = 100;
    suite.config.costFunction = new CrossEntropyErrorFunction();
    suite.config.regularization = TrainingConfig.Regularization.L2;
    suite.config.regularizationLambda = 0.01f;
    suite.config.learningRate = 0.01f;

    Console.WriteLine("Running training for {0} epochs!", epochs);
    Stopwatch sw = Stopwatch.StartNew();
    int progress = 0;
    // BUGFIX: train on the device the caller selected. Previously this always
    // used ComputeDeviceFactory.CreateFallbackComputeDevice(), silently
    // ignoring the 'selectedDevice' parameter.
    var promise = target_network.Train(suite, selectedDevice);
    Console.WriteLine("____________________");
    while (!promise.IsReady())
    {
        int progress_rounded = (int)(promise.GetTotalProgress() * 20);
        // Catch up if training advanced more than one bar step since the last poll
        // (the old 'if' printed at most one '#' per 50 ms, so the bar could lag).
        while (progress_rounded > progress)
        {
            ++progress;
            Console.Write("#");
        }
        Thread.Sleep(50);
    }
    sw.Stop();
    Console.WriteLine("#");
    Console.WriteLine("Training finished! Elapsed={0}ms", sw.Elapsed.TotalMilliseconds);
}
/// <summary>
/// Trains the supplied network on a deterministic one-hot data set with the given
/// cost/regularization settings, then compares its output on a fixed probe input
/// against a precomputed reference.
/// </summary>
public static void TestTraining(Network network, float[] referenceOutput, IErrorFunction errorFunc, TrainingSuite.TrainingConfig.Regularization regularization, float regularizationLambda, float learningRate)
{
    // Layer sizes; only the first and last entries are used (to size the synthetic vectors).
    List<int> layerConfig = new List<int> { 5, 33, 12, 51, 5 };

    #region Training
    var trainingData = new List<TrainingSuite.TrainingData>();
    int inputSize = layerConfig[0];
    int outputSize = layerConfig[layerConfig.Count - 1];
    for (int sample = 0; sample < 1000; sample++)
    {
        var input = new float[inputSize];
        var desiredOutput = new float[outputSize];
        // Deterministic pseudo-random one-hot index, identical across runs.
        int hot = (sample * 13426) % 5;
        input[hot] = 1.0f;
        desiredOutput[hot] = 1.0f;
        trainingData.Add(new TrainingSuite.TrainingData(input, desiredOutput));
    }

    var suite = new TrainingSuite(trainingData);
    suite.config.epochs = 2;
    suite.config.shuffleTrainingData = false;
    suite.config.miniBatchSize = 13;
    suite.config.costFunction = errorFunc;
    suite.config.regularization = regularization;
    suite.config.regularizationLambda = regularizationLambda;
    suite.config.learningRate = learningRate;

    var promise = network.Train(suite, ComputeDeviceFactory.CreateFallbackComputeDevice());
    promise.Await();
    #endregion

    // Probe the trained network and compare against the expected reference output.
    float[] testInput = new float[] { 0.3f, 0.4f, 0.6f, 0.1f, 0.5f };
    var result = network.Compute(testInput, ComputeDeviceFactory.CreateFallbackComputeDevice());
    Utils.CheckNetworkError(referenceOutput, result);
}
/// <summary>
/// Trains two clones of the same randomly initialized network — one on the CPU
/// fallback device and one on the first OpenCL device — with identical data and
/// hyper-parameters, then asserts that both produce matching outputs.
/// </summary>
public static void TestOpenCLTrainingWithConfig(IErrorFunction errorFunc, TrainingSuite.TrainingConfig.Regularization regularization, float regularizationLambda, float learningRate)
{
    List<int> layerConfig = new List<int>();
    layerConfig.Add(10);
    layerConfig.Add(512);
    layerConfig.Add(12);
    layerConfig.Add(3);
    layerConfig.Add(51);
    layerConfig.Add(30);

    // Create one randomly initialized network, then clone it through JSON so the
    // CPU and OpenCL runs start from identical weights.
    Network networkReference = Network.CreateNetworkInitRandom(layerConfig.ToArray(), new SigmoidActivation());
    var jsonData = networkReference.ExportToJSON();
    Network networkCpuTrained = Network.CreateNetworkFromJSON(jsonData);
    Network networkOpenCLTrained = Network.CreateNetworkFromJSON(jsonData);

    var cpuCalculator = ComputeDeviceFactory.CreateFallbackComputeDevice();
    var openCLCalculator = GetFirstOpenCLDevice();

    var rnd = new Random();
    List<TrainingSuite.TrainingData> trainingData = new List<TrainingSuite.TrainingData>();
    for (int i = 0; i < 1000; i++)
    {
        float[] input = new float[layerConfig[0]];
        float[] output = new float[layerConfig[layerConfig.Count - 1]];
        var idx = rnd.Next(0, input.Length);
        // BUGFIX: the one-hot input must be set at the same index the desired
        // output below is derived from; previously a second, unrelated random
        // index was set, decorrelating inputs from outputs.
        input[idx] = 1.0f;
        // Desired output encodes idx in a repeating 3-value pattern (30 outputs = 10 * 3).
        for (int j = 0; j < 10; j++)
        {
            output[j * 3 + 0] = idx * 0.1f;
            output[j * 3 + 1] = 1.0f - (idx * 0.1f);
            output[j * 3 + 2] = idx * 0.05f;
        }
        trainingData.Add(new TrainingSuite.TrainingData(input, output));
    }

    TrainingSuite suite = new TrainingSuite(trainingData);
    suite.config.epochs = 1;
    suite.config.shuffleTrainingData = false;
    suite.config.miniBatchSize = 13;
    suite.config.costFunction = errorFunc;
    suite.config.regularization = regularization;
    suite.config.regularizationLambda = regularizationLambda;
    suite.config.learningRate = learningRate;

    var promise1 = networkCpuTrained.Train(suite, cpuCalculator);
    var promise2 = networkOpenCLTrained.Train(suite, openCLCalculator);
    promise1.Await();
    promise2.Await();
    Assert.IsTrue(promise1.IsReady() && promise2.IsReady());

    // Evaluate both networks on the CPU device so that any difference in the
    // outputs comes from training, not from the inference path.
    float[] testInput = new float[layerConfig[0]];
    var cpuTrainedOutput = networkCpuTrained.Compute(testInput, cpuCalculator);
    var openCLTrainedOutput = networkOpenCLTrained.Compute(testInput, cpuCalculator);
    CheckNetworkError(cpuTrainedOutput, openCLTrainedOutput);
}
/// <summary>
/// Computes the accumulated weight/bias gradient for one minibatch
/// (trainingData[trainingDataBegin..trainingDataEnd)) by running the forward
/// and backward pass kernels on the OpenCL device, then unpacks the result
/// into per-neuron NeuronData gradients.
/// </summary>
/// <param name="network">Network whose gradient is computed; weights are read, not modified.</param>
/// <param name="suite">Supplies the training samples and cost-function configuration.</param>
/// <param name="trainingDataBegin">Index of the first sample in the minibatch (inclusive).</param>
/// <param name="trainingDataEnd">Index one past the last sample in the minibatch (exclusive).</param>
/// <returns>Gradient vector shaped like the network: one NeuronData (weights + bias) per neuron per layer.</returns>
public override unsafe List<List<NeuronData>> CalculateAccumulatedGradientForMinibatch(Network network, TrainingSuite suite, int trainingDataBegin, int trainingDataEnd)
{
    int trainingSamples = trainingDataEnd - trainingDataBegin;
    var ret = Utils.CreateGradientVector(network);
    int[] networkConfigParams = null;
    int totalWeightAndBiasCount = 0;
    int delta_k_vectorSize = 0;
    int totalActivationCount = 0;
    // Number of inputs feeding the first layer (weights per neuron of layer 0).
    int inputActivationCount = network.layers.First().GetWeightsPerNeuron();
    {
        foreach (var item in network.layers)
        {
            totalActivationCount += item.GetNeuronCount();
        }
        // Header consumed by the kernels. Fixed slots first, then one neuron
        // count per layer. Slots 6 and 7 are patched after the layer scan below.
        List<int> networkConfigParamsList = new List<int>();
        networkConfigParamsList.Add(0);                                                  //[0] layer index to be processed
        networkConfigParamsList.Add(network.layers.Count);                               //[1] layer count
        networkConfigParamsList.Add(trainingSamples);                                    //[2] numTrainingSamples
        networkConfigParamsList.Add(network.activationFunction.GetOpenCLFunctionId());   //[3] activation function id
        networkConfigParamsList.Add(suite.config.costFunction.GetOpenCLFunctionID());    //[4] cost function id
        networkConfigParamsList.Add(totalActivationCount);                               //[5] totalActivationCount
        networkConfigParamsList.Add(0);                                                  //[6] totalWeightsAndBiases (patched below)
        networkConfigParamsList.Add(0);                                                  //[7] widestLayerNeuronCount / delta_k size (patched below)
        networkConfigParamsList.Add(network.layers.First().GetWeightsPerNeuron());       //[8] input count
        for (int i = 0; i < network.layers.Count; i++)
        {
            networkConfigParamsList.Add(network.layers[i].GetNeuronCount());             //[9+i] per-layer neuron count
            totalWeightAndBiasCount += network.layers[i].biases.Length;
            totalWeightAndBiasCount += network.layers[i].weightMx.Length;
            if (i > 0) //The first layer will not write the delta_k vector, so it shouldn't contribute to its size.
            {
                delta_k_vectorSize = Math.Max(network.layers[i].GetNeuronCount(), delta_k_vectorSize);
            }
        }
        networkConfigParamsList[6] = totalWeightAndBiasCount;
        networkConfigParamsList[7] = delta_k_vectorSize;
        networkConfigParams = networkConfigParamsList.ToArray();
    }
    float[] desiredOutputs = new float[network.layers.Last().GetNeuronCount() * trainingSamples];
    // Gradient accumulated over the whole minibatch, sized as one full set of
    // weights+biases. NOTE(review): unpacked below as [per layer, per neuron:
    // weights..., bias] — the kernel presumably accumulates across samples; verify
    // against the kernel source.
    float[] outputGradient = new float[totalWeightAndBiasCount];
    float[] inputParameters = new float[trainingSamples * inputActivationCount];
    float[] weightsAndBiases = new float[totalWeightAndBiasCount];
    fixed (int* networkConfigParamsPtr = networkConfigParams)
    {
        fixed (float* outputGradientPtr = outputGradient, desiredOutputsPtr = desiredOutputs, inputParametersPtr = inputParameters, weightsAndBiasesPtr = weightsAndBiases)
        {
            MemoryAllocation mem_NetworkConfigParams = computeFramework.GetMemoryFor(networkConfigParams.Length * 4, MemoryFlag.ReadOnly | MemoryFlag.CopyHostPointer, new IntPtr(networkConfigParamsPtr));
            // Pack all sample inputs contiguously (offsets/counts are in bytes, hence * 4).
            for (int i = 0; i < trainingSamples; ++i)
            {
                Buffer.BlockCopy(suite.trainingData[trainingDataBegin + i].input, 0, inputParameters, i * inputActivationCount * 4, inputActivationCount * 4);
            }
            MemoryAllocation mem_InputActivations = computeFramework.GetMemoryFor(inputParameters.Length * 4, MemoryFlag.ReadOnly | MemoryFlag.CopyHostPointer, new IntPtr(inputParametersPtr));
            // Contains the whole network's activation values, and Z values, for each training sample.
            // Memory layout for one sample: [first layer's activations][second layer's activations]...[last layer's activations][first layer's z values][second layer's z values]...[last layer's z values]
            // After that, the next sample's same values follow (hence * trainingSamples * 2).
            MemoryAllocation mem_activationsAndZValues = computeFramework.GetMemoryFor(totalActivationCount * trainingSamples * 2 * 4, MemoryFlag.ReadWrite, IntPtr.Zero);
            {
                // Flatten the network parameters: per layer, all weights then all biases.
                int offset = 0;
                foreach (var layer in network.layers)
                {
                    Buffer.BlockCopy(layer.weightMx, 0, weightsAndBiases, offset, layer.weightMx.Length * 4);
                    offset += layer.weightMx.Length * 4;
                    Buffer.BlockCopy(layer.biases, 0, weightsAndBiases, offset, layer.biases.Length * 4);
                    offset += layer.biases.Length * 4;
                }
            }
            MemoryAllocation mem_weightsAndBiases = computeFramework.GetMemoryFor(weightsAndBiases.Length * 4, MemoryFlag.ReadOnly | MemoryFlag.CopyHostPointer, new IntPtr(weightsAndBiasesPtr));
            // delta_k_vector is double buffered (hence the * 2). In a pass, the previous delta_k values are read, and the next ones are written.
            // Memory layout is: [delta_k buffer1 of sample0][delta_k buffer2 of sample0][delta_k buffer1 of sample1][delta_k buffer2 of sample1] ...
            // Math.Max(1, ...) avoids a zero-sized allocation for single-layer networks.
            MemoryAllocation mem_delta_k_vector = computeFramework.GetMemoryFor(Math.Max(1, delta_k_vectorSize * trainingSamples * 2 * 4), MemoryFlag.ReadWrite, IntPtr.Zero);
            computeFramework.SetKernelArg(forwardPass, 0, mem_NetworkConfigParams);
            computeFramework.SetKernelArg(forwardPass, 1, mem_activationsAndZValues);
            computeFramework.SetKernelArg(forwardPass, 2, mem_InputActivations);
            computeFramework.SetKernelArg(forwardPass, 3, mem_weightsAndBiases);
            // Staging buffer holding 0..N-1 so a single int can be uploaded to patch
            // the "current layer" slot of the config buffer between kernel launches.
            int[] layerIndexBuffer = new int[network.layers.Count];
            for (int i = 0; i < layerIndexBuffer.Length; ++i)
            {
                layerIndexBuffer[i] = i;
            }
            // X dimension iterates neurons of the current layer, Y iterates training samples.
            var localWorkGroupSize = new IntPtr[] { new IntPtr(deviceConfig.idealWorkgroupSizeX), new IntPtr(deviceConfig.idealWorkgroupSizeY) };
            var globalWorkSize = new IntPtr[] { new IntPtr(0), new IntPtr(ExtendGlobalWorkSize(trainingSamples, localWorkGroupSize[1].ToInt32())) };
            #region Forward pass
            for (int i = 0; i < network.layers.Count; ++i)
            {
                if (i > 0)
                {
                    computeFramework.UploadToMemory(mem_NetworkConfigParams, i, layerIndexBuffer, false, 1); //Update layer index to be processed by the kernel
                }
                globalWorkSize[0] = new IntPtr(ExtendGlobalWorkSize(network.layers[i].GetNeuronCount(), localWorkGroupSize[0].ToInt32()));
                computeFramework.EnqueueKernel(forwardPass, globalWorkSize, localWorkGroupSize);
            }
            #endregion
            #region backward pass
            // Pack all desired outputs contiguously, one slot per training sample.
            int desiredOutputByteSizePerTrainigSample = network.layers.Last().GetNeuronCount() * 4;
            for (int i = 0; i < trainingSamples; ++i)
            {
                Buffer.BlockCopy(suite.trainingData[trainingDataBegin + i].desiredOutput, 0, desiredOutputs, i * desiredOutputByteSizePerTrainigSample, desiredOutputByteSizePerTrainigSample);
            }
            var mem_desired_outputs = computeFramework.GetMemoryFor(desiredOutputs.Length * 4, MemoryFlag.ReadOnly | MemoryFlag.CopyHostPointer, new IntPtr(desiredOutputsPtr));
            var mem_param_gradient = computeFramework.GetMemoryFor(outputGradient.Length * 4, MemoryFlag.ReadWrite | MemoryFlag.CopyHostPointer, new IntPtr(outputGradientPtr));
            computeFramework.SetKernelArg(backwardPassKernel, 0, mem_NetworkConfigParams);
            computeFramework.SetKernelArg(backwardPassKernel, 1, mem_activationsAndZValues);
            computeFramework.SetKernelArg(backwardPassKernel, 2, mem_delta_k_vector);
            computeFramework.SetKernelArg(backwardPassKernel, 3, mem_param_gradient);
            computeFramework.SetKernelArg(backwardPassKernel, 4, mem_desired_outputs);
            computeFramework.SetKernelArg(backwardPassKernel, 5, mem_InputActivations);
            computeFramework.SetKernelArg(backwardPassKernel, 6, mem_weightsAndBiases);
            // Run the backward pass from the output layer towards the input layer.
            // The config buffer starts with the last layer's index already set? NOTE(review):
            // slot 0 was initialized to 0 and is only patched when i != last — confirm the
            // kernel derives the output layer on the first launch.
            for (int i = network.layers.Count - 1; i >= 0; --i)
            {
                globalWorkSize[0] = new IntPtr(ExtendGlobalWorkSize(network.layers[i].GetNeuronCount(), localWorkGroupSize[0].ToInt32()));
                if (i != network.layers.Count - 1)
                {
                    computeFramework.UploadToMemory(mem_NetworkConfigParams, i, layerIndexBuffer, false, 1); //Update layer index to be processed by the kernel
                }
                computeFramework.EnqueueKernel(backwardPassKernel, globalWorkSize, localWorkGroupSize);
            }
            #endregion
            computeFramework.FlushCommandBuffer();
            // Blocking read: wait for all queued kernels and pull the gradient back to the host.
            computeFramework.ReadBuffer(mem_param_gradient, true, new UIntPtr(0), new UIntPtr(mem_param_gradient.bufferSizeInBytes), new IntPtr(outputGradientPtr));
        }
    }
    computeFramework.UnuseMemoryAllocations();
    // Unpack the flat gradient buffer into the NeuronData structure:
    // per layer, per neuron: [weights..., bias].
    int gradIdx = 0;
    foreach (var layer in ret)
    {
        foreach (var neuron in layer)
        {
            Buffer.BlockCopy(outputGradient, gradIdx * 4, neuron.weights, 0, neuron.weights.Length * 4);
            gradIdx += neuron.weights.Length;
            neuron.bias = outputGradient[gradIdx];
            ++gradIdx;
        }
    }
    return (ret);
}
/// <summary>
/// Runs the minibatch gradient computation on every configured device and
/// cross-checks each device's result against the previous one, so divergent
/// implementations are caught immediately. Returns the last device's gradient.
/// </summary>
public override List<List<NeuronData>> CalculateAccumulatedGradientForMinibatch(Network network, TrainingSuite suite, int trainingDataBegin, int trainingDataEnd)
{
    List<List<NeuronData>> previousResult = null;
    foreach (var device in devices)
    {
        var currentResult = device.CalculateAccumulatedGradientForMinibatch(network, suite, trainingDataBegin, trainingDataEnd);
        if (previousResult != null)
        {
            // Compare against the previous device's gradient within a small tolerance.
            Utils.ValidateGradient(previousResult, currentResult, 0.00001);
        }
        previousResult = currentResult;
    }
    return previousResult;
}
/// <summary>
/// Trains two clones of the same randomly initialized network — one on the CPU
/// fallback device and one on the first OpenCL device — with identical data and
/// hyper-parameters, then asserts their outputs match. Optionally mixes ReLU
/// layers in with sigmoid ones to exercise per-layer activation handling.
/// </summary>
public static void TestOpenCLTrainingWithConfig(IErrorFunction errorFunc, TrainingConfig.Regularization regularization, float regularizationLambda, float learningRate, bool mix_activations = false)
{
    // When mixing, every other layer uses ReLU instead of sigmoid.
    IActivationFunction alternateActivation = new SigmoidActivation();
    if (mix_activations)
    {
        alternateActivation = new ReLUActivation();
    }

    int input_neurons = 10;
    var layer_config = new List<Tuple<IActivationFunction, int>>();
    layer_config.Add(new Tuple<IActivationFunction, int>(new SigmoidActivation(), 512));
    layer_config.Add(new Tuple<IActivationFunction, int>(alternateActivation, 12));
    layer_config.Add(new Tuple<IActivationFunction, int>(new SigmoidActivation(), 3));
    layer_config.Add(new Tuple<IActivationFunction, int>(alternateActivation, 51));
    layer_config.Add(new Tuple<IActivationFunction, int>(new SigmoidActivation(), 30));

    // Clone one random network through JSON so both devices train identical weights.
    Network networkReference = Network.CreateNetworkInitRandom(input_neurons, layer_config);
    var jsonData = networkReference.ExportToJSON();
    Network networkCpuTrained = Network.CreateNetworkFromJSON(jsonData);
    Network networkOpenCLTrained = Network.CreateNetworkFromJSON(jsonData);

    var cpuCalculator = ComputeDeviceFactory.CreateFallbackComputeDevice();
    var openCLCalculator = GetFirstOpenCLDevice();

    var rnd = new Random();
    List<TrainingSuite.TrainingData> trainingData = new List<TrainingSuite.TrainingData>();
    for (int i = 0; i < 1000; i++)
    {
        float[] input = new float[input_neurons];
        float[] output = new float[layer_config.Last().Item2];
        var idx = rnd.Next(0, input.Length);
        // BUGFIX: set the one-hot input at 'idx' — the index the desired output
        // below encodes; previously an unrelated second random index was set,
        // decorrelating inputs from outputs.
        input[idx] = 1.0f;
        // Desired output encodes idx in a repeating 3-value pattern (30 outputs = 10 * 3).
        for (int j = 0; j < 10; j++)
        {
            output[j * 3 + 0] = idx * 0.1f;
            output[j * 3 + 1] = 1.0f - (idx * 0.1f);
            output[j * 3 + 2] = idx * 0.05f;
        }
        trainingData.Add(new TrainingSuite.TrainingData(input, output));
    }

    TrainingSuite suite = new TrainingSuite(trainingData);
    suite.config.epochs = 1;
    suite.config.shuffleTrainingData = false;
    suite.config.miniBatchSize = 13;
    suite.config.costFunction = errorFunc;
    suite.config.regularization = regularization;
    suite.config.regularizationLambda = regularizationLambda;
    suite.config.learningRate = learningRate;

    var promise1 = networkCpuTrained.Train(suite, cpuCalculator);
    var promise2 = networkOpenCLTrained.Train(suite, openCLCalculator);
    promise1.Await();
    promise2.Await();
    Assert.IsTrue(promise1.IsReady() && promise2.IsReady());

    // Evaluate both networks on the CPU device so that any output difference
    // comes from training, not from the inference path.
    float[] testInput = new float[input_neurons];
    var cpuTrainedOutput = networkCpuTrained.Compute(testInput, cpuCalculator);
    var openCLTrainedOutput = networkOpenCLTrained.Compute(testInput, cpuCalculator);
    ValidateFloatArray(cpuTrainedOutput, openCLTrainedOutput);
}
private void Button_Click(object sender, RoutedEventArgs e)
{
    // Nothing to train without a loaded network.
    if (network == null)
    {
        return;
    }

    var trainingFileContent = OpenFileWithDialog("Open news training data", "*.csv|*.csv");
    if (trainingFileContent == null)
    {
        return; // user cancelled the file dialog
    }

    var trainingData = new List<TrainingSuite.TrainingData>();
    FillTrainingDataFromFile(ref trainingData, trainingFileContent, network.GetInputSize());

    // Pull the hyper-parameters straight from the UI controls.
    var trainingSuite = new TrainingSuite(trainingData);
    trainingSuite.config.learningRate = float.Parse(txtLearningRate.Text);
    trainingSuite.config.epochs = int.Parse(txtEpochs.Text);
    trainingSuite.config.miniBatchSize = int.Parse(txtMiniBatches.Text);
    trainingSuite.config.regularizationLambda = float.Parse(txtRegularizationLambda.Text);
    trainingSuite.config.shuffleTrainingData = chkShuffleMinibatches.IsChecked == true;

    // Cost function: index 1 selects MSE; anything else falls back to cross-entropy.
    if (cmbCostFunction.SelectedIndex == 1)
    {
        trainingSuite.config.costFunction = new MeanSquaredErrorFunction();
    }
    else
    {
        trainingSuite.config.costFunction = new CrossEntropyErrorFunction();
    }

    // Regularization: 0 = none, 1 = L1, anything else (including 2) = L2.
    if (cmbRegularization.SelectedIndex == 0)
    {
        trainingSuite.config.regularization = TrainingSuite.TrainingConfig.Regularization.None;
    }
    else if (cmbRegularization.SelectedIndex == 1)
    {
        trainingSuite.config.regularization = TrainingSuite.TrainingConfig.Regularization.L1;
    }
    else
    {
        trainingSuite.config.regularization = TrainingSuite.TrainingConfig.Regularization.L2;
    }

    var calculator = GetCalculator(cmbComputeDevice.SelectedIndex);
    var trainingProcess = network.Train(trainingSuite, calculator);

    // Block on a modal progress dialog until training completes.
    var dialogWnd = new TrainingDialog(trainingProcess);
    dialogWnd.ShowDialog();
}
/// <summary>
/// Full training session: prompts for training and verification data files,
/// loads them on a worker thread, evaluates the untrained network, then trains
/// one epoch at a time — re-evaluating, logging, and saving a JSON snapshot
/// plus a visualization after every epoch.
/// </summary>
private void Button9_Click(object sender, EventArgs e)
{
    // Start a fresh session log.
    System.IO.File.WriteAllText("D:\\nntmp\\log.txt", "BEGIN\n");

    string imgFile = "";
    string labelFile = "";
    string testImgFile = "";
    string testLabelFile = "";

    // Prompt for the four data files; abort if any dialog is cancelled.
    openFileDialog1.Filter = "Image Training data (Image)|*.*";
    openFileDialog1.Title = "Open Training images file";
    if (openFileDialog1.ShowDialog() == DialogResult.OK)
    {
        imgFile = openFileDialog1.FileName;
    }
    else
    {
        return;
    }

    openFileDialog1.Filter = "Training data (Label)|*.*";
    openFileDialog1.Title = "Open Training labels file";
    if (openFileDialog1.ShowDialog() == DialogResult.OK)
    {
        labelFile = openFileDialog1.FileName;
    }
    else
    {
        return;
    }

    openFileDialog1.Filter = "Verification Image Training data (Image)|*.*";
    openFileDialog1.Title = "Open Verification images file";
    if (openFileDialog1.ShowDialog() == DialogResult.OK)
    {
        testImgFile = openFileDialog1.FileName;
    }
    else
    {
        return;
    }

    openFileDialog1.Filter = "Verification Training data (Label)|*.*";
    openFileDialog1.Title = "Open Verification labels file";
    if (openFileDialog1.ShowDialog() == DialogResult.OK)
    {
        testLabelFile = openFileDialog1.FileName;
    }
    else
    {
        return;
    }

    // Load both data sets on a worker thread behind a modal progress window.
    LoadingWindow wnd = new LoadingWindow();
    wnd.Text = "Loading training data";
    List<TrainingSuite.TrainingData> trainingData = new List<TrainingSuite.TrainingData>();
    List<TrainingSuite.TrainingData> testData = new List<TrainingSuite.TrainingData>();
    System.Threading.Thread thread = new System.Threading.Thread(() =>
    {
        LoadTestDataFromFiles(trainingData, labelFile, imgFile, (x) => { wnd.SetProgress(x); }, true);
        LoadTestDataFromFiles(testData, testLabelFile, testImgFile, (x) => { wnd.SetProgress(x); }, false);
        wnd.Finish();
    });
    thread.Start();
    if (wnd.ShowDialog() != DialogResult.OK)
    {
        return;
    }

    // Baseline: evaluate the untrained network on the verification set.
    int success = 0;
    for (int i = 0; i < testData.Count; i++)
    {
        var output = network.Compute(testData[i].input, calculator);
        int resultIdx = ClassifyOutput(output);
        int expectedIdx = ClassifyOutput(testData[i].desiredOutput);
        if (resultIdx == expectedIdx)
        {
            ++success;
        }
    }
    network.AttachDescription("Network (" + string.Join(",", network.GetLayerConfig()) + ") epoch: 0 (initial) Test success rate: [" + success + " of " + testData.Count + "]");
    System.IO.File.WriteAllText("D:\\nntmp\\network_000000.json", network.ExportToJSON());
    VisualizeNetworkSpecific(network, "D:\\nntmp\\network_000000_vis_", 0, ((float)success / testData.Count) * 100.0f);

    var trainingSuite = new TrainingSuite(trainingData);
    trainingSuite.config.miniBatchSize = (int)numMiniBatchSize.Value;
    trainingSuite.config.learningRate = (float)numLearningRate.Value;
    trainingSuite.config.regularizationLambda = (float)numLambda.Value;
    trainingSuite.config.shuffleTrainingData = true;

    // BUGFIX: the L1/L2 branches previously compared SelectedIndex against 0 as
    // well, making them unreachable and leaving regularization at its default.
    if (comboRegularization.SelectedIndex == 0)
    {
        trainingSuite.config.regularization = TrainingSuite.TrainingConfig.Regularization.None;
    }
    else if (comboRegularization.SelectedIndex == 1)
    {
        trainingSuite.config.regularization = TrainingSuite.TrainingConfig.Regularization.L1;
    }
    else if (comboRegularization.SelectedIndex == 2)
    {
        trainingSuite.config.regularization = TrainingSuite.TrainingConfig.Regularization.L2;
    }

    if (comboCostFunction.SelectedIndex == 0)
    {
        trainingSuite.config.costFunction = new MeanSquaredErrorFunction();
    }
    else if (comboCostFunction.SelectedIndex == 1)
    {
        trainingSuite.config.costFunction = new CrossEntropyErrorFunction();
    }

    // One epoch per Train() call so the network can be evaluated and saved after each epoch.
    trainingSuite.config.epochs = 1;

    LogDebug("Initial network saved " + DateTime.Now.ToString());
    for (int epoch = 0; epoch < (int)numEpoch.Value; epoch++)
    {
        LogDebug("Starting epoch #" + (epoch + 1) + " " + DateTime.Now.ToString());
        trainingPromise = network.Train(trainingSuite, calculator);
        trainingPromise.Await();
        LogDebug(" Training finished " + DateTime.Now.ToString());

        // Re-evaluate on the verification set after this epoch.
        success = 0;
        for (int i = 0; i < testData.Count; i++)
        {
            var output = network.Compute(testData[i].input, calculator);
            int resultIdx = ClassifyOutput(output);
            int expectedIdx = ClassifyOutput(testData[i].desiredOutput);
            if (resultIdx == expectedIdx)
            {
                ++success;
            }
        }
        LogDebug(" Verification finished Success rate: [" + success + " of " + testData.Count + "] " + DateTime.Now.ToString());

        // Snapshot this epoch's network (JSON + visualization), zero-padded by epoch number.
        network.AttachDescription("Network (" + string.Join(",", network.GetLayerConfig()) + ") epoch: " + (epoch + 1) + " Test success rate: [" + success + " of " + testData.Count + "]");
        System.IO.File.WriteAllText("D:\\nntmp\\network_" + (epoch + 1).ToString().PadLeft(6, '0') + ".json", network.ExportToJSON());
        VisualizeNetworkSpecific(network, "D:\\nntmp\\network_" + (epoch + 1).ToString().PadLeft(6, '0') + "_vis_", epoch + 1, ((float)success / testData.Count) * 100.0f);
        LogDebug(" Saving finished " + DateTime.Now.ToString());
    }
}
/// <summary>
/// Prompts for training image/label files, loads them on a worker thread, then
/// starts an asynchronous training run configured from the UI controls and
/// shows a progress dialog.
/// </summary>
private void button4_Click(object sender, EventArgs e)
{
    string imgFile = "";
    string labelFile = "";

    // Ask for the image and label files; abort if either dialog is cancelled.
    openFileDialog1.Filter = "Image Training data (Image)|*.*";
    openFileDialog1.Title = "Open Training images file";
    if (openFileDialog1.ShowDialog() == DialogResult.OK)
    {
        imgFile = openFileDialog1.FileName;
    }
    else
    {
        return;
    }

    openFileDialog1.Filter = "Training data (Label)|*.*";
    openFileDialog1.Title = "Open Training labels file";
    if (openFileDialog1.ShowDialog() == DialogResult.OK)
    {
        labelFile = openFileDialog1.FileName;
    }
    else
    {
        return;
    }

    // Load the training data on a worker thread behind a modal progress window.
    LoadingWindow wnd = new LoadingWindow();
    wnd.Text = "Loading training data";
    List<TrainingSuite.TrainingData> trainingData = new List<TrainingSuite.TrainingData>();
    System.Threading.Thread thread = new System.Threading.Thread(() =>
    {
        LoadTestDataFromFiles(trainingData, labelFile, imgFile, (x) => { wnd.SetProgress(x); }, true);
        wnd.Finish();
    });
    thread.Start();
    if (wnd.ShowDialog() != DialogResult.OK)
    {
        return;
    }

    var trainingSuite = new TrainingSuite(trainingData);
    trainingSuite.config.miniBatchSize = (int)numMiniBatchSize.Value;
    trainingSuite.config.learningRate = (float)numLearningRate.Value;
    trainingSuite.config.regularizationLambda = (float)numLambda.Value;
    trainingSuite.config.shuffleTrainingData = checkShuffle.Checked;

    // BUGFIX: the L1/L2 branches previously compared SelectedIndex against 0 as
    // well, making them unreachable and leaving regularization at its default.
    if (comboRegularization.SelectedIndex == 0)
    {
        trainingSuite.config.regularization = TrainingSuite.TrainingConfig.Regularization.None;
    }
    else if (comboRegularization.SelectedIndex == 1)
    {
        trainingSuite.config.regularization = TrainingSuite.TrainingConfig.Regularization.L1;
    }
    else if (comboRegularization.SelectedIndex == 2)
    {
        trainingSuite.config.regularization = TrainingSuite.TrainingConfig.Regularization.L2;
    }

    if (comboCostFunction.SelectedIndex == 0)
    {
        trainingSuite.config.costFunction = new MeanSquaredErrorFunction();
    }
    else if (comboCostFunction.SelectedIndex == 1)
    {
        trainingSuite.config.costFunction = new CrossEntropyErrorFunction();
    }

    trainingSuite.config.epochs = (int)numEpoch.Value;

    // Kick off asynchronous training and show the modal progress dialog.
    trainingStart = DateTime.Now;
    trainingPromise = network.Train(trainingSuite, calculator);
    trainingtimer.Start();
    progressDialog = new TrainingWindow(trainingPromise);
    progressDialog.ShowDialog();
}