/// <summary>
/// Trains the neural network on the normalized track fingerprints, reporting progress
/// through <paramref name="callback"/> at every stage. Training runs in three phases:
/// (1) build normalized input/output pairs, (2) Idyn cycles of output reordering, each
/// followed by Edyn resilient-propagation epochs, (3) Efixed epochs with the final
/// output assignment, then median-response computation.
/// </summary>
/// <param name="network">Network to train; first and last layer activations are read from it.</param>
/// <param name="callback">Invoked with (status, correct-output ratio, error, iteration) after each step.</param>
public void Train(Network network, TrainingCallback callback)
{
    IActivationFunction activationFunctionInput = network.GetActivation(0);
    int outputNeurons = network.GetLayerNeuronCount(network.LayerCount - 1);
    double error = 0;

    // Phase 1: fill standard inputs/outputs from the training snippets.
    callback.Invoke(TrainingStatus.FillingStandardInputs, 0, 0, 0);
    Dictionary<int, List<BasicMLData>> trackIdFingerprints =
        GetNormalizedTrackFingerprints(activationFunctionInput, trainingSongSnippets, outputNeurons);
    workingThread = Thread.CurrentThread; // remember the trainer thread so callers can abort it
    IActivationFunction activationFunctionOutput = network.GetActivation(network.LayerCount - 1);
    double[][] normalizedBinaryCodes = GetNormalizedBinaryCodes(activationFunctionOutput, outputNeurons);
    Tuple<double[][], double[][]> tuple = FillStandardInputsOutputs(trackIdFingerprints, normalizedBinaryCodes);
    double[][] inputs = tuple.Item1;
    double[][] outputs = tuple.Item2;
    if (inputs == null || outputs == null)
    {
        // Input/output generation failed; report and bail out before training starts.
        callback.Invoke(TrainingStatus.Exception, 0, 0, 0);
        return;
    }

    int currentIteration = 0;
    double correctOutputs = 0.0;
    BasicNeuralDataSet dataset = new BasicNeuralDataSet(inputs, outputs);
    ITrain learner = new ResilientPropagation(network, dataset);
    try
    {
        // Phase 2: dynamic output reordering cycle (Idyn = 50 outer passes).
        for (int i = 0; i < Idyn; i++)
        {
            if (paused)
            {
                pauseSem.WaitOne(); // block here until the owner of pauseSem resumes training
            }

            correctOutputs = NetworkPerformanceMeter.MeasurePerformance(network, dataset);
            callback.Invoke(TrainingStatus.OutputReordering, correctOutputs, error, currentIteration);
            ReorderOutput(network, dataset, trackIdFingerprints, normalizedBinaryCodes);

            // Edyn = 10 resilient-propagation epochs per reordering pass.
            for (int j = 0; j < Edyn; j++)
            {
                if (paused)
                {
                    pauseSem.WaitOne();
                }

                correctOutputs = NetworkPerformanceMeter.MeasurePerformance(network, dataset);
                callback.Invoke(TrainingStatus.RunningDynamicEpoch, correctOutputs, error, currentIteration);
                learner.Iteration();
                error = learner.Error;
                currentIteration++;
            }
        }

        // Phase 3: fixed training epochs with the final (no longer reordered) outputs.
        for (int i = 0; i < Efixed; i++)
        {
            if (paused)
            {
                pauseSem.WaitOne();
            }

            correctOutputs = NetworkPerformanceMeter.MeasurePerformance(network, dataset);
            callback.Invoke(TrainingStatus.FixedTraining, correctOutputs, error, currentIteration);
            learner.Iteration();
            error = learner.Error;
            currentIteration++;
        }

        network.ComputeMedianResponses(inputs, trainingSongSnippets);
        callback.Invoke(TrainingStatus.Finished, correctOutputs, error, currentIteration);
    }
    catch (ThreadAbortException)
    {
        // The training thread was aborted externally: report the last known metrics
        // and clear the pause flag so a future session does not start paused.
        // NOTE(review): on .NET Framework a ThreadAbortException is automatically
        // re-thrown at the end of this catch block unless Thread.ResetAbort() is
        // called — confirm that propagation is intended.
        callback.Invoke(TrainingStatus.Aborted, correctOutputs, error, currentIteration);
        paused = false;
    }
}
/// <summary>
/// Trains the neural network on the normalized track fingerprints, reporting progress
/// through <paramref name="callback"/>: first fills standard input/output pairs, then
/// runs Idyn output-reordering cycles of Edyn resilient-propagation epochs each,
/// then Efixed fixed epochs, and finally computes median responses.
/// </summary>
/// <param name="network">Network to train; first and last layer activations are read from it.</param>
/// <param name="callback">Invoked with (status, correct-output ratio, error, iteration) after each step.</param>
public void Train(Network network, TrainingCallback callback)
{
    IActivationFunction activationFunctionInput = network.GetActivation(0);
    int outputNeurons = network.GetLayerNeuronCount(network.LayerCount - 1);
    double error = 0;

    /*First operation is filling standard input/outputs*/
    callback.Invoke(TrainingStatus.FillingStandardInputs, 0, 0, 0);
    Dictionary<int, List<BasicMLData>> trackIdFingerprints =
        GetNormalizedTrackFingerprints(activationFunctionInput, trainingSongSnippets, outputNeurons);
    workingThread = Thread.CurrentThread; // remember the trainer thread so callers can abort it
    IActivationFunction activationFunctionOutput = network.GetActivation(network.LayerCount - 1);
    double[][] normalizedBinaryCodes = GetNormalizedBinaryCodes(activationFunctionOutput, outputNeurons);
    /*Fill standard input output*/
    Tuple<double[][], double[][]> tuple = FillStandardInputsOutputs(trackIdFingerprints, normalizedBinaryCodes);
    double[][] inputs = tuple.Item1;
    double[][] outputs = tuple.Item2;
    if (inputs == null || outputs == null)
    {
        // Input/output generation failed; report and bail out before training starts.
        callback.Invoke(TrainingStatus.Exception, 0, 0, 0);
        return;
    }

    int currentIterration = 0;
    double correctOutputs = 0.0;
    BasicNeuralDataSet dataset = new BasicNeuralDataSet(inputs, outputs);
    ITrain learner = new ResilientPropagation(network, dataset);
    try
    {
        // Dynamic output reordering cycle
        /*Idyn = 50*/
        for (int i = 0; i < Idyn; i++)
        {
            if (paused)
            {
                pauseSem.WaitOne(); // block here until the owner of pauseSem resumes training
            }

            correctOutputs = NetworkPerformanceMeter.MeasurePerformance(network, dataset);
            callback.Invoke(TrainingStatus.OutputReordering, correctOutputs, error, currentIterration);
            ReorderOutput(network, dataset, trackIdFingerprints, normalizedBinaryCodes);

            /*Edyn = 10*/
            for (int j = 0; j < Edyn; j++)
            {
                if (paused)
                {
                    pauseSem.WaitOne();
                }

                correctOutputs = NetworkPerformanceMeter.MeasurePerformance(network, dataset);
                callback.Invoke(TrainingStatus.RunningDynamicEpoch, correctOutputs, error, currentIterration);
                learner.Iteration();
                error = learner.Error;
                currentIterration++;
            }
        }

        // Fixed training epochs after the final output ordering is settled.
        for (int i = 0; i < Efixed; i++)
        {
            if (paused)
            {
                pauseSem.WaitOne();
            }

            correctOutputs = NetworkPerformanceMeter.MeasurePerformance(network, dataset);
            callback.Invoke(TrainingStatus.FixedTraining, correctOutputs, error, currentIterration);
            learner.Iteration();
            error = learner.Error;
            currentIterration++;
        }

        network.ComputeMedianResponses(inputs, trainingSongSnippets);
        callback.Invoke(TrainingStatus.Finished, correctOutputs, error, currentIterration);
    }
    catch (ThreadAbortException)
    {
        // The training thread was aborted externally: report the last known metrics
        // and clear the pause flag so a future session does not start paused.
        // NOTE(review): on .NET Framework a ThreadAbortException is automatically
        // re-thrown at the end of this catch block unless Thread.ResetAbort() is
        // called — confirm that propagation is intended.
        callback.Invoke(TrainingStatus.Aborted, correctOutputs, error, currentIterration);
        paused = false;
    }
}