/// <summary>
/// Construct an input field based on a NeuralDataSet.
/// </summary>
/// <param name="usedForNetworkInput">Is this field used for neural input.</param>
/// <param name="data">The data set to use.</param>
/// <param name="offset">The input or ideal index to use. This treats the input
/// and ideal as one long array, concatenated together.</param>
public InputFieldNeuralDataSet(bool usedForNetworkInput,
    INeuralDataSet data, int offset)
{
    this.data = data;
    this.offset = offset;
    UsedForNetworkInput = usedForNetworkInput;
}
/// <summary>
/// Create an instance of competitive training.
/// </summary>
/// <param name="network">The network to train.</param>
/// <param name="learningRate">The learning rate, how much to apply per iteration.</param>
/// <param name="training">The training set (unsupervised).</param>
/// <param name="neighborhood">The neighborhood function to use.</param>
public CompetitiveTraining(BasicNetwork network, double learningRate,
    INeuralDataSet training, INeighborhoodFunction neighborhood)
{
    this.neighborhood = neighborhood;
    Training = training;
    this.LearningRate = learningRate;
    this.network = network;
    this.inputLayer = network.GetLayer(BasicNetwork.TAG_INPUT);
    this.outputLayer = network.GetLayer(BasicNetwork.TAG_OUTPUT);
    this.synapses = network.Structure.GetPreviousSynapses(this.outputLayer);
    this.inputNeuronCount = this.inputLayer.NeuronCount;
    this.outputNeuronCount = this.outputLayer.NeuronCount;
    this.ForceWinner = false;
    Error = 0;

    // set up the correction matrix
    foreach (ISynapse synapse in this.synapses)
    {
        Matrix matrix = new Matrix(synapse.WeightMatrix.Rows,
            synapse.WeightMatrix.Cols);
        this.correctionMatrix[synapse] = matrix;
    }

    // create the BMU class
    this.bmuUtil = new BestMatchingUnit(this);
}
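// A minimal usage sketch for CompetitiveTraining. Assumptions: "somNetwork" is a
// hypothetical two-layer SOM-style network whose layers carry the TAG_INPUT and
// TAG_OUTPUT tags, "somTraining" is an unsupervised INeuralDataSet, and
// NeighborhoodSingle is one INeighborhoodFunction implementation in this Encog
// version; substitute whichever neighborhood function your build provides.
INeighborhoodFunction neighborhood = new NeighborhoodSingle();
CompetitiveTraining somTrain = new CompetitiveTraining(
    somNetwork, 0.7, somTraining, neighborhood);
for (int epoch = 0; epoch < 100; epoch++)
{
    somTrain.Iteration();
    Console.WriteLine("SOM epoch " + epoch + ", error: " + somTrain.Error);
}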
/// <summary>
/// Construct a Hopfield training class.
/// </summary>
/// <param name="trainingSet">The training set to use.</param>
/// <param name="network">The network to train.</param>
public TrainHopfield(INeuralDataSet trainingSet, BasicNetwork network)
{
    this.network = network;
    this.Training = trainingSet;
    this.Error = 0;
}
/// <summary>
/// Create a class to train using backpropagation.
/// </summary>
/// <param name="network">The network that is to be trained.</param>
/// <param name="training">The training set to use.</param>
/// <param name="profile">The OpenCL profile to use, null for CPU.</param>
/// <param name="learnRate">The rate at which the weight matrix will be adjusted based on
/// learning.</param>
/// <param name="momentum">The influence that previous iteration's training deltas will
/// have on the current iteration.</param>
public Backpropagation(BasicNetwork network, INeuralDataSet training,
    OpenCLTrainingProfile profile, double learnRate, double momentum)
    : base(network, training)
{
    if (profile == null)
    {
        TrainFlatNetworkBackPropagation backFlat =
            new TrainFlatNetworkBackPropagation(
                network.Structure.Flat, this.Training, learnRate, momentum);
        this.FlatTraining = backFlat;
    }
#if !SILVERLIGHT
    else
    {
        TrainFlatNetworkOpenCL backFlat = new TrainFlatNetworkOpenCL(
            network.Structure.Flat, this.Training, profile);
        backFlat.LearnBPROP(learnRate, momentum);
        this.FlatTraining = backFlat;
    }
#endif
}
/// <summary>
/// Train the network, using the specified training algorithm, and send the
/// output to the console.
/// </summary>
/// <param name="train">The training method to use.</param>
/// <param name="network">The network to train.</param>
/// <param name="trainingSet">The training set.</param>
/// <param name="minutes">The number of minutes to train for.</param>
public static void TrainConsole(ITrain train, BasicNetwork network,
    INeuralDataSet trainingSet, int minutes)
{
    int epoch = 1;
    long remaining;

    Console.WriteLine("Beginning training...");
    long start = Environment.TickCount;
    do
    {
        train.Iteration();

        long current = Environment.TickCount;
        long elapsed = (current - start) / 1000; // seconds
        remaining = minutes - elapsed / 60;

        Console.WriteLine("Iteration #" + Format.FormatInteger(epoch)
            + " Error:" + Format.FormatPercent(train.Error)
            + " elapsed time = " + Format.FormatTimeSpan((int)elapsed)
            + " time left = " + Format.FormatTimeSpan((int)remaining * 60));
        epoch++;
    } while (remaining > 0 && !train.TrainingDone);
    train.FinishTraining();
}
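// A minimal usage sketch for TrainConsole: train an XOR network for two
// minutes, echoing progress to the console. The xorInput/xorIdeal arrays are
// hypothetical sample data; the network construction follows the same
// AddLayer/FinalizeStructure pattern used elsewhere in this code.
double[][] xorInput =
{
    new[] { 0.0, 0.0 }, new[] { 0.0, 1.0 },
    new[] { 1.0, 0.0 }, new[] { 1.0, 1.0 }
};
double[][] xorIdeal = { new[] { 0.0 }, new[] { 1.0 }, new[] { 1.0 }, new[] { 0.0 } };
INeuralDataSet xorSet = new BasicNeuralDataSet(xorInput, xorIdeal);

BasicNetwork xorNetwork = new BasicNetwork();
xorNetwork.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 2));
xorNetwork.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
xorNetwork.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
xorNetwork.Structure.FinalizeStructure();
xorNetwork.Reset();

ITrain xorTrain = new ResilientPropagation(xorNetwork, xorSet);
EncogUtility.TrainConsole(xorTrain, xorNetwork, xorSet, 2);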
/// <summary>
/// Construct a new NeuralDataSet based on the parameters specified.
/// </summary>
/// <param name="inputCount">The input count.</param>
/// <param name="idealCount">The ideal (output) count.</param>
public NormalizationStorageNeuralDataSet(int inputCount, int idealCount)
{
    this.inputCount = inputCount;
    this.idealCount = idealCount;
    this.dataset = new BasicNeuralDataSet();
}
/// <summary>
/// Encode the Encog dataset.
/// </summary>
/// <param name="training">The training data.</param>
/// <param name="outputIndex">The ideal element to use; this is necessary because an SVM has
/// only a single output.</param>
/// <returns>The SVM problem.</returns>
public static svm_problem Encode(INeuralDataSet training, int outputIndex)
{
    svm_problem result = new svm_problem();
    result.l = (int)ObtainTrainingLength(training);
    result.y = new double[result.l];
    result.x = new svm_node[result.l][];

    int elementIndex = 0;
    foreach (INeuralDataPair pair in training)
    {
        INeuralData input = pair.Input;
        INeuralData output = pair.Ideal;
        result.x[elementIndex] = new svm_node[input.Count];
        for (int i = 0; i < input.Count; i++)
        {
            result.x[elementIndex][i] = new svm_node();
            result.x[elementIndex][i].index = i + 1;
            result.x[elementIndex][i].value_Renamed = input[i];
        }
        result.y[elementIndex] = output[outputIndex];
        elementIndex++;
    }
    return result;
}
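// A short usage sketch for Encode: since an SVM has a single output, a data
// set with several ideal columns is encoded once per output index, yielding
// one svm_problem (and ultimately one model) per output.
svm_problem[] problems = new svm_problem[(int)training.IdealSize];
for (int outputIndex = 0; outputIndex < problems.Length; outputIndex++)
{
    problems[outputIndex] = Encode(training, outputIndex);
}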
/// <summary>
/// Construct the SVD training object.
/// </summary>
/// <param name="network">The network to train. Must have a single output neuron.</param>
/// <param name="training">The training data to use. Must be indexable.</param>
public SVDTraining(BasicNetwork network, INeuralDataSet training)
{
    ILayer outputLayer = network.GetLayer(BasicNetwork.TAG_OUTPUT);

    if (outputLayer == null)
    {
        throw new TrainingError("SVD requires an output layer.");
    }

    if (outputLayer.NeuronCount != 1)
    {
        throw new TrainingError("SVD requires an output layer with a single neuron.");
    }

    if (network.GetLayer(RadialBasisPattern.RBF_LAYER) == null)
    {
        throw new TrainingError("SVD is only tested to work on radial basis function networks.");
    }

    rbfLayer = (RadialBasisFunctionLayer)network.GetLayer(RadialBasisPattern.RBF_LAYER);

    this.Training = training;
    this.network = network;
    this.trainingLength = (int)this.Training.InputSize;

    BasicNeuralData input = new BasicNeuralData(this.Training.InputSize);
    BasicNeuralData ideal = new BasicNeuralData(this.Training.IdealSize);
    this.pair = new BasicNeuralDataPair(input, ideal);
}
/// <summary>
/// Construct an RPROP job. For more information on RPROP see the
/// ResilientPropagation class.
/// </summary>
/// <param name="network">The network to train.</param>
/// <param name="training">The training data to use.</param>
/// <param name="loadToMemory">True if binary training data should be loaded to memory.</param>
/// <param name="localRatio">The local ratio, used if this job is performed by an OpenCL Device.</param>
/// <param name="globalRatio">The global ratio, used if this job is performed by an OpenCL Device.</param>
/// <param name="segmentationRatio">The segmentation ratio, used if this job is performed by an OpenCL Device.</param>
/// <param name="iterationsPer">How many iterations to process per cycle.</param>
public RPROPJob(BasicNetwork network, INeuralDataSet training,
    bool loadToMemory, double localRatio, int globalRatio,
    double segmentationRatio, int iterationsPer)
    : this(network, training, loadToMemory,
        RPROPConst.DEFAULT_INITIAL_UPDATE, RPROPConst.DEFAULT_MAX_STEP,
        localRatio, globalRatio, segmentationRatio, iterationsPer)
{
}
/// <summary>
/// Save the specified training set to XML.
/// </summary>
/// <param name="obj">The training set to save.</param>
/// <param name="xmlOut">The XML writer to save to.</param>
public void Save(IEncogPersistedObject obj, WriteXML xmlOut)
{
    PersistorUtil.BeginEncogObject(EncogPersistedCollection.TYPE_TRAINING,
        xmlOut, obj, true);
    INeuralDataSet set = (INeuralDataSet)obj;
    StringBuilder builder = new StringBuilder();

    foreach (INeuralDataPair pair in set)
    {
        xmlOut.BeginTag(BasicNeuralDataSetPersistor.TAG_ITEM);

        NumberList.ToList(CSVFormat.EG_FORMAT, builder, pair.Input.Data);
        xmlOut.AddProperty(BasicNeuralDataSetPersistor.TAG_INPUT,
            builder.ToString());

        if (pair.Ideal != null)
        {
            NumberList.ToList(CSVFormat.EG_FORMAT, builder, pair.Ideal.Data);
            xmlOut.AddProperty(BasicNeuralDataSetPersistor.TAG_IDEAL,
                builder.ToString());
        }
        xmlOut.EndTag();
    }
    xmlOut.EndTag();
}
public static float[] learningFactor = { 0.01f }; // ,0.001f, 0.1f };

// Run a grid search over every combination of iteration count, hidden layer
// count, hidden neuron layout, and learning factor, training one network per
// configuration.
public static void RunTests(INeuralDataSet learningSet, INeuralDataSet testingSet,
    int inputSize, int testingSize,
    List<Face> learningFaces, List<Face> testingFaces)
{
    hiddenNeurons_test = initList();
    int counter = 0;
    for (int i = 0; i < iterations.Length; i++)
    {
        for (int h = 0; h < hiddenLayers_test.Length; h++)
        {
            for (int n = 0; n < hiddenNeurons_test.Count; n++)
            {
                // only use neuron layouts that match the current layer count
                if (hiddenNeurons_test[n].Length == hiddenLayers_test[h])
                {
                    for (int l = 0; l < learningFactor.Length; l++)
                    {
                        counter++;
                        InputClass configuration = new InputClass();
                        configuration.hiddenLayers = hiddenLayers_test[h];
                        configuration.hiddenNeurons = hiddenNeurons_test[n];
                        configuration.activationFunction = new ActivationSigmoid();
                        configuration.bias = true;
                        configuration.iterations = iterations[i];
                        configuration.learningFactor = (double)learningFactor[l];
                        configuration.momentum = 0.4;
                        Learn(learningSet, testingSet, learningFaces[0].Features.Count,
                            configuration, testingFaces.Count, counter);
                    }
                }
            }
        }
    }
}
// Compute the denormalized classification error of a trained network over a
// data set. For classification (answersSize > 0) the winning output neuron is
// taken as the answer; for regression (answersSize == 0) the single raw output
// is used.
public static double GetNetworkDataSetError(Backpropagation network,
    INeuralDataSet dataSet, int answersSize)
{
    double[] neuralAnswer = new double[dataSet.Count];
    int i = 0;
    foreach (var pair in dataSet)
    {
        // allocate at least one slot so the regression case does not index
        // into a zero-length array
        double[] output = new double[Math.Max(answersSize, 1)];
        network.Network.Flat.Compute(pair.Input.Data, output);
        if (answersSize != 0)
        {
            // pick the index of the largest output (argmax)
            double best = 0.0;
            for (int r = 0; r < answersSize; r++)
            {
                if (output[r] >= best)
                {
                    neuralAnswer[i] = r;
                    best = output[r];
                }
            }
        }
        else
        {
            neuralAnswer[i] = output[0];
        }
        i++;
    }
    int[] answers = DenormaliseAnswers(neuralAnswer, answersSize);
    //Console.WriteLine("Neural Network Results");
    double calculateError = CalculateFinalError(answers, dataSet, answersSize);
    return calculateError;
}
/// <summary>
/// Evaluate how many training iterations the network can complete in 10
/// seconds. This causes each of the training pairs to be run through the
/// network repeatedly, using RPROP training.
/// </summary>
/// <param name="profile">The OpenCL profile to use, or null for the CPU.</param>
/// <param name="network">The network to evaluate.</param>
/// <param name="training">The training data to use.</param>
/// <returns>The number of iterations performed in 10 seconds.</returns>
public static int EvaluateTrain(OpenCLTrainingProfile profile,
    BasicNetwork network, INeuralDataSet training)
{
    // train the neural network
    ITrain train;

    if (profile == null)
    {
        train = new ResilientPropagation(network, training);
    }
    else
    {
        train = new ResilientPropagation(network, training, profile,
            RPROPConst.DEFAULT_INITIAL_UPDATE, RPROPConst.DEFAULT_MAX_STEP);
    }

    int iterations = 0;
    Stopwatch watch = new Stopwatch();
    watch.Start();
    while (watch.ElapsedMilliseconds < (10 * MILIS))
    {
        iterations++;
        train.Iteration();
    }

    return iterations;
}
/// <summary>
/// Generate random training into a training set.
/// </summary>
/// <param name="training">The training set to generate into.</param>
/// <param name="seed">The seed to use.</param>
/// <param name="count">How much data to generate.</param>
/// <param name="min">The low random value.</param>
/// <param name="max">The high random value.</param>
public static void Generate(INeuralDataSet training, long seed,
    int count, double min, double max)
{
    LinearCongruentialGenerator rand = new LinearCongruentialGenerator(seed);

    int inputCount = training.InputSize;
    int idealCount = training.IdealSize;

    for (int i = 0; i < count; i++)
    {
        INeuralData inputData = new BasicNeuralData(inputCount);
        for (int j = 0; j < inputCount; j++)
        {
            inputData[j] = rand.Range(min, max);
        }

        INeuralData idealData = new BasicNeuralData(idealCount);
        for (int j = 0; j < idealCount; j++)
        {
            idealData[j] = rand.Range(min, max);
        }

        BasicNeuralDataPair pair = new BasicNeuralDataPair(inputData, idealData);
        training.Add(pair);
    }
}
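// A usage sketch for Generate, with one caveat: the method reads
// InputSize/IdealSize from the target set, so the set must already report its
// sizes. Here the set is seeded with a single zero pair first (a hypothetical
// workaround; the seed pair remains in the set, and a buffered data set created
// with explicit sizes would also work). The enclosing class name
// RandomTrainingFactory is an assumption.
BasicNeuralDataSet target = new BasicNeuralDataSet();
target.Add(new BasicNeuralDataPair(new BasicNeuralData(2), new BasicNeuralData(1)));
RandomTrainingFactory.Generate(target, 1000, 100, -1.0, 1.0);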
public static ITrain CreateNeuronNetwork(INeuralDataSet learningSet,
    int inputSize, InputClass inputData)
{
    BasicNetwork network = new BasicNetwork();

    int[] szerokosc = inputData.hiddenNeurons;        // neurons per hidden layer ("width")
    int dlugosc = inputData.hiddenLayers;             // number of hidden layers ("length")
    bool bias = inputData.bias;
    IActivationFunction ActivationFunction = inputData.activationFunction;
    double learning = inputData.learningFactor;
    double momentum = inputData.momentum;

    network.AddLayer(new BasicLayer(ActivationFunction, bias, inputSize));
    for (int i = 0; i < dlugosc; i++)
    {
        network.AddLayer(new BasicLayer(ActivationFunction, bias, szerokosc[i]));
    }
    network.AddLayer(new BasicLayer(ActivationFunction, false, 4));
    network.Structure.FinalizeStructure();
    network.Reset();

    ITrain train = new Backpropagation(network, learningSet, learning, momentum);
    return train;
}
// Compare the network's answers against the ideal answers in the testing set
// and return the error as the percentage of misclassified examples. The ideal
// answer for each pair is the index of the output (of 4 classes) whose ideal
// value is at least 0.66.
public static double CalculateFinalError(int[] answers, INeuralDataSet testingSet)
{
    int properAnswer = 0;
    double[] neuralAnswers = new double[answers.Count()];
    int j = 0;
    foreach (INeuralDataPair pair in testingSet)
    {
        for (int r = 0; r < 4; r++)
        {
            if ((double)(pair.Ideal.Data[r]) >= 0.66)
            {
                neuralAnswers[j] = (double)r;
            }
        }
        j++;
    }
    int[] idealAnswers = DenormaliseAnswers(neuralAnswers);
    for (int i = 0; i < answers.Count(); i++)
    {
        if (idealAnswers[i] == answers[i])
        {
            properAnswer++;
        }
    }
    double error = 100.0 - ((properAnswer * 100.0) / answers.Count());
    return error;
}
/* Creates the appropriate neural network from the parameters. */
public ITrain CreateNeuronNetwork(INeuralDataSet trainingSet)
{
    BasicNetwork network = new BasicNetwork();

    // problem == 0 is the 4-class classification task (2 inputs, 4 outputs);
    // otherwise a single-input, single-output task.
    if (nHelp.problem == 0)
    {
        network.AddLayer(new BasicLayer(ActivationFunction, nHelp.bias, 2));
    }
    else
    {
        network.AddLayer(new BasicLayer(ActivationFunction, nHelp.bias, 1));
    }

    for (int i = 0; i < nHelp.layers - 2; i++)
    {
        network.AddLayer(new BasicLayer(ActivationFunction, nHelp.bias, nHelp.neurons));
    }

    if (nHelp.problem == 0)
    {
        network.AddLayer(new BasicLayer(ActivationFunction, false, 4));
    }
    else
    {
        network.AddLayer(new BasicLayer(ActivationFunction, false, 1));
    }

    network.Structure.FinalizeStructure();
    network.Reset();

    ITrain train = new Backpropagation(network, trainingSet, nHelp.learning, nHelp.momentum);
    return train;
}
/// <summary>
/// Create a class to train using backpropagation. Use auto learn rate and momentum. Use the CPU to train.
/// </summary>
/// <param name="network">The network that is to be trained.</param>
/// <param name="training">The training data to be used for backpropagation.</param>
public Backpropagation(BasicNetwork network, INeuralDataSet training)
    : this(network, training, null, 0, 0)
{
    AddStrategy(new SmartLearningRate());
    AddStrategy(new SmartMomentum());
}
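// A minimal usage sketch: the two-argument constructor wires in the
// SmartLearningRate and SmartMomentum strategies, so no manual tuning
// parameters are required. "network" and "trainingSet" are assumed to be
// built as in the surrounding examples; the 0.01 target error is arbitrary.
ITrain bpropTrain = new Backpropagation(network, trainingSet);
int bpropEpoch = 1;
do
{
    bpropTrain.Iteration();
    Console.WriteLine("Epoch #" + bpropEpoch + " Error: " + bpropTrain.Error);
    bpropEpoch++;
} while (bpropTrain.Error > 0.01);
bpropTrain.FinishTraining();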
/// <summary>
/// Validate that the neural network would work with the specified training set.
/// </summary>
/// <param name="network">The neural network that is to be evaluated.</param>
/// <param name="training">The training set to evaluate.</param>
public static void ValidateNetworkForTraining(BasicNetwork network,
    INeuralDataSet training)
{
    ILayer inputLayer = network.GetLayer(BasicNetwork.TAG_INPUT);
    ILayer outputLayer = network.GetLayer(BasicNetwork.TAG_OUTPUT);

    if (inputLayer == null)
    {
        throw new NeuralNetworkError("This operation requires that the neural network have an input layer.");
    }

    if (outputLayer == null)
    {
        throw new NeuralNetworkError("This operation requires that the neural network have an output layer.");
    }

    if (inputLayer.NeuronCount != training.InputSize)
    {
        throw new NeuralNetworkError("The input layer size of "
            + inputLayer.NeuronCount
            + " must match the training input size of "
            + training.InputSize + ".");
    }

    if (training.IdealSize > 0
        && outputLayer.NeuronCount != training.IdealSize)
    {
        throw new NeuralNetworkError("The output layer size of "
            + outputLayer.NeuronCount
            + " must match the training ideal size of "
            + training.IdealSize + ".");
    }
}
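// A short guard sketch: run the validator before constructing a trainer so a
// layer/column size mismatch fails fast with a readable message. The enclosing
// class name EncogValidate is an assumption; adjust to wherever this method
// lives in your build.
try
{
    EncogValidate.ValidateNetworkForTraining(network, trainingSet);
}
catch (NeuralNetworkError ex)
{
    Console.WriteLine("Cannot train: " + ex.Message);
    return;
}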
/// <summary>
/// Open any datasets that were used by the input layer.
/// </summary>
private void OpenDataSet()
{
    // clear out any data sets already there
    this.readDataSet.Clear();
    this.dataSetFieldMap.Clear();
    this.dataSetIteratorMap.Clear();

    // only add each iterator once
    IDictionary<INeuralDataSet, NeuralDataFieldHolder> uniqueSets =
        new Dictionary<INeuralDataSet, NeuralDataFieldHolder>();

    // find the unique data sets
    foreach (IInputField field in this.inputFields)
    {
        if (field is InputFieldNeuralDataSet)
        {
            InputFieldNeuralDataSet dataSetField = (InputFieldNeuralDataSet)field;
            INeuralDataSet dataSet = dataSetField.NeuralDataSet;
            if (!uniqueSets.ContainsKey(dataSet))
            {
                IEnumerator<INeuralDataPair> iterator = dataSet.GetEnumerator();
                NeuralDataFieldHolder holder = new NeuralDataFieldHolder(
                    iterator, dataSetField);
                uniqueSets[dataSet] = holder;
                this.readDataSet.Add(iterator);
            }

            NeuralDataFieldHolder holder2 = uniqueSets[dataSet];
            this.dataSetFieldMap[dataSetField] = holder2;
            this.dataSetIteratorMap[holder2.GetEnumerator()] = holder2;
        }
    }
}
/// <summary>
/// Add a subset. This method will validate that the input and
/// ideal sizes are correct.
/// </summary>
/// <param name="set">The subset to add.</param>
public void AddSubset(INeuralDataSet set)
{
    if (set.InputSize != this.inputSize)
    {
        String str = "Subset input size of " + set.InputSize
            + " must match union input size of " + inputSize;
#if logging
        UnionNeuralDataSet.logger.Error(str);
#endif
        throw new NeuralDataError(str);
    }
    else if (set.IdealSize != this.idealSize)
    {
        String str = "Subset ideal size of " + set.IdealSize
            + " must match union ideal size of " + idealSize;
#if logging
        UnionNeuralDataSet.logger.Error(str);
#endif
        throw new NeuralDataError(str);
    }
    else
    {
        this.subsets.Add(set);
    }
}
/// <summary>
/// Construct a job definition for backpropagation. For more information on
/// backpropagation, see the Backpropagation class.
/// </summary>
/// <param name="network">The network to use.</param>
/// <param name="training">The training data to use.</param>
/// <param name="loadToMemory">Should binary data be loaded to memory?</param>
/// <param name="learningRate">The learning rate to use.</param>
/// <param name="momentum">The momentum to use.</param>
public BPROPJob(BasicNetwork network, INeuralDataSet training,
    bool loadToMemory, double learningRate, double momentum)
    : base(network, training, loadToMemory)
{
    this.LearningRate = learningRate;
    this.Momentum = momentum;
}
/// <summary>
/// Train, using the specified training method, display progress to a dialog
/// box.
/// </summary>
/// <param name="train">The training method to use.</param>
/// <param name="network">The network to train.</param>
/// <param name="trainingSet">The training set to use.</param>
public static void TrainDialog(ITrain train, BasicNetwork network,
    INeuralDataSet trainingSet)
{
    TrainingDialog dialog = new TrainingDialog();
    dialog.Train = train;
    dialog.ShowDialog();
}
/// <summary>
/// Construct the instar training object.
/// </summary>
/// <param name="network">The network to be trained.</param>
/// <param name="training">The training data.</param>
/// <param name="learningRate">The learning rate.</param>
public TrainInstar(BasicNetwork network, INeuralDataSet training,
    double learningRate)
{
    this.network = network;
    this.training = training;
    this.learningRate = learningRate;
    this.parts = new FindCPN(network);
}
/// <summary>
/// Construct a training class.
/// </summary>
/// <param name="network">The network to train.</param>
/// <param name="training">The training data.</param>
public ScaledConjugateGradient(BasicNetwork network, INeuralDataSet training)
    : base(network, training)
{
    TrainFlatNetworkSCG scgFlat = new TrainFlatNetworkSCG(
        network.Structure.Flat, this.Training);
    this.FlatTraining = scgFlat;
}
/// <summary>
/// Construct an object to determine the optimal number of hidden layers and
/// neurons for the specified training data and pattern.
/// </summary>
/// <param name="training">The training data to use.</param>
/// <param name="pattern">The network pattern to use to solve this data.</param>
/// <param name="iterations">How many iterations to try per network.</param>
/// <param name="report">Object used to report status to.</param>
public PruneIncremental(INeuralDataSet training,
    INeuralNetworkPattern pattern, int iterations,
    IStatusReportable report)
    : base(report)
{
    this.training = training;
    this.pattern = pattern;
    this.iterations = iterations;
}
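// A hedged usage sketch for PruneIncremental. FeedForwardPattern,
// ConsoleStatusReportable, and the AddHiddenLayer/Process/BestNetwork members
// are assumptions about this Encog version; verify against your build before
// relying on them.
FeedForwardPattern pattern = new FeedForwardPattern();
pattern.InputNeurons = training.InputSize;
pattern.OutputNeurons = training.IdealSize;
pattern.ActivationFunction = new ActivationSigmoid();

PruneIncremental prune = new PruneIncremental(training, pattern, 10,
    new ConsoleStatusReportable());
prune.AddHiddenLayer(1, 10); // search 1..10 neurons in a single hidden layer
prune.Process();
BasicNetwork best = prune.BestNetwork;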
/// <summary>
/// Train the network, to a specific error, send the output to the console.
/// </summary>
/// <param name="network">The network to train.</param>
/// <param name="trainingSet">The training set to use.</param>
/// <param name="error">The error level to train to.</param>
public static void TrainToError(BasicNetwork network,
    INeuralDataSet trainingSet, double error)
{
    Propagation train = new ResilientPropagation(network, trainingSet);
    train.NumThreads = 0; // zero means autodetect the thread count
    EncogUtility.TrainToError(train, trainingSet, error);
}
/// <summary>
/// Train the neural network, using RPROP training, and output status to the
/// console.
/// </summary>
/// <param name="network">The network to train.</param>
/// <param name="trainingSet">The training set.</param>
/// <param name="minutes">The number of minutes to train for.</param>
public static void TrainConsole(BasicNetwork network,
    INeuralDataSet trainingSet, int minutes)
{
    Propagation train = new ResilientPropagation(network, trainingSet);
    train.NumThreads = 0;
    EncogUtility.TrainConsole(train, network, trainingSet, minutes);
}
/// <summary>
/// Train using RPROP and display progress to a dialog box.
/// </summary>
/// <param name="network">The network to train.</param>
/// <param name="trainingSet">The training set to use.</param>
public static void TrainDialog(BasicNetwork network,
    INeuralDataSet trainingSet)
{
    Propagation train = new ResilientPropagation(network, trainingSet);
    train.NumThreads = 0;
    EncogUtility.TrainDialog(train, network, trainingSet);
}
/// <summary>
/// Load the specified training set.
/// </summary>
/// <param name="training">The training set to load.</param>
public void Load(INeuralDataSet training)
{
    BeginLoad(training.InputSize, training.IdealSize);
    foreach (INeuralDataPair pair in training)
    {
        Add(pair);
    }
    EndLoad();
}
/// <summary>
/// Construct an RPROP job. For more information on RPROP see the
/// ResilientPropagation class.
/// </summary>
/// <param name="network">The network to train.</param>
/// <param name="training">The training data to use.</param>
/// <param name="loadToMemory">True if binary training data should be loaded to memory.</param>
/// <param name="initialUpdate">The initial update.</param>
/// <param name="maxStep">The max step.</param>
/// <param name="localRatio">The local ratio, used if this job is performed by an OpenCL Device.</param>
/// <param name="globalRatio">The global ratio, used if this job is performed by an OpenCL Device.</param>
/// <param name="segmentationRatio">The segmentation ratio, used if this job is performed by an OpenCL Device.</param>
/// <param name="iterationsPer">How many iterations to process per cycle.</param>
public RPROPJob(BasicNetwork network, INeuralDataSet training,
    bool loadToMemory, double initialUpdate, double maxStep,
    double localRatio, int globalRatio, double segmentationRatio,
    int iterationsPer)
    : base(network, training, loadToMemory)
{
    this.InitialUpdate = initialUpdate;
    this.MaxStep = maxStep;
    this.LocalRatio = localRatio;
    this.GlobalRatio = globalRatio;
    this.SegmentationRatio = segmentationRatio;
    this.IterationsPer = iterationsPer;
}
/// <summary>
/// Construct a training job.
/// </summary>
/// <param name="network">The network to train.</param>
/// <param name="training">The training data to use.</param>
/// <param name="loadToMemory">True, if binary data should be loaded to memory.</param>
public TrainingJob(BasicNetwork network, INeuralDataSet training,
    bool loadToMemory)
    : base()
{
    this.Network = network;
    this.Training = training;
    this.LoadToMemory = loadToMemory;
    this.IterationsPer = 1;
    this.LocalRatio = 1.0;
    this.GlobalRatio = 1;
    this.SegmentationRatio = 1.0;
}
/// <summary>
/// Construct the ADALINE trainer.
/// </summary>
/// <param name="network">The network to train.</param>
/// <param name="training">The training set.</param>
/// <param name="learningRate">The learning rate.</param>
public TrainAdaline(BasicNetwork network, INeuralDataSet training,
    double learningRate)
{
    if (network.Structure.Layers.Count > 2)
    {
        throw new NeuralNetworkError("An ADALINE network only has two layers.");
    }
    this.network = network;

    ILayer input = network.GetLayer(BasicNetwork.TAG_INPUT);

    this.synapse = input.Next[0];
    this.training = training;
    this.learningRate = learningRate;
}
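// A minimal usage sketch: ADALINE is a two-layer network (the constructor
// above rejects anything deeper). ActivationLinear is assumed to exist in
// this Encog version; the 5/1 layer sizes and 0.01 learning rate are
// arbitrary example values.
BasicNetwork adaline = new BasicNetwork();
adaline.AddLayer(new BasicLayer(new ActivationLinear(), true, 5));
adaline.AddLayer(new BasicLayer(new ActivationLinear(), false, 1));
adaline.Structure.FinalizeStructure();
adaline.Reset();

ITrain adalineTrain = new TrainAdaline(adaline, trainingSet, 0.01);
adalineTrain.Iteration();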
/// <summary>
/// Obtain the length of the training data.
/// </summary>
/// <param name="training">The training data to check.</param>
/// <returns>The length of the training data.</returns>
private static long ObtainTrainingLength(INeuralDataSet training)
{
    // if the data set is indexable, the count is known directly
    if (training is IIndexable)
    {
        return ((IIndexable)training).Count;
    }

    // otherwise count the pairs by iterating over them
    long result = 0;
    IEnumerator<INeuralDataPair> itr = training.GetEnumerator();
    while (itr.MoveNext())
    {
        result++;
    }
    return result;
}
/// <summary>
/// Construct a Manhattan propagation training object.
/// </summary>
/// <param name="network">The network to train.</param>
/// <param name="training">The training data to use.</param>
/// <param name="profile">The OpenCL profile to use, null for CPU.</param>
/// <param name="learnRate">The learning rate.</param>
public ManhattanPropagation(BasicNetwork network, INeuralDataSet training,
    OpenCLTrainingProfile profile, double learnRate)
    : base(network, training)
{
    if (profile == null)
    {
        FlatTraining = new TrainFlatNetworkManhattan(
            network.Structure.Flat, this.Training, learnRate);
    }
#if !SILVERLIGHT
    else
    {
        TrainFlatNetworkOpenCL manhattanFlat = new TrainFlatNetworkOpenCL(
            network.Structure.Flat, this.Training, profile);
        manhattanFlat.LearnManhattan(learnRate);
        this.FlatTraining = manhattanFlat;
    }
#endif
}
/// <summary>
/// Construct a resilient training object, allow the training parameters to
/// be specified. Usually the default parameters are acceptable for the
/// resilient training algorithm. Therefore you should usually use the other
/// constructor, that makes use of the default values.
/// </summary>
/// <param name="network">The network to train.</param>
/// <param name="training">The training set to use.</param>
/// <param name="profile">Optional EncogCL profile to execute on.</param>
/// <param name="initialUpdate">The initial update values, this is the amount that the deltas
/// are all initially set to.</param>
/// <param name="maxStep">The maximum that a delta can reach.</param>
public ResilientPropagation(BasicNetwork network, INeuralDataSet training,
    OpenCLTrainingProfile profile, double initialUpdate, double maxStep)
    : base(network, training)
{
    if (profile == null)
    {
        TrainFlatNetworkResilient rpropFlat = new TrainFlatNetworkResilient(
            network.Structure.Flat, this.Training);
        this.FlatTraining = rpropFlat;
    }
#if !SILVERLIGHT
    else
    {
        TrainFlatNetworkOpenCL rpropFlat = new TrainFlatNetworkOpenCL(
            network.Structure.Flat, this.Training, profile);
        rpropFlat.LearnRPROP(initialUpdate, maxStep);
        this.FlatTraining = rpropFlat;
    }
#endif
}
/// <summary>
/// Construct an RPROP trainer, allows an OpenCL device to be specified. Use
/// the defaults for all training parameters. Usually this is the constructor
/// to use as the resilient training algorithm is designed for the default
/// parameters to be acceptable for nearly all problems.
/// </summary>
/// <param name="network">The network to train.</param>
/// <param name="training">The training data to use.</param>
/// <param name="profile">The profile to use.</param>
public ResilientPropagation(BasicNetwork network, INeuralDataSet training,
    OpenCLTrainingProfile profile)
    : this(network, training, profile,
        RPROPConst.DEFAULT_INITIAL_UPDATE, RPROPConst.DEFAULT_MAX_STEP)
{
}
/// <summary>
/// Construct a normalized neural storage class to hold data.
/// </summary>
/// <param name="dataset">The data set to store to. This uses an existing data set.</param>
public NormalizationStorageNeuralDataSet(INeuralDataSet dataset)
{
    this.dataset = dataset;
    this.inputCount = this.dataset.InputSize;
    this.idealCount = this.dataset.IdealSize;
}
/// <summary>
/// Construct a CODEC.
/// </summary>
/// <param name="dataset">The dataset to use.</param>
public NeuralDataSetCODEC(INeuralDataSet dataset)
{
    this.dataset = dataset;
    this.inputSize = dataset.InputSize;
    this.idealSize = dataset.IdealSize;
}
/// <summary>
/// Calculate the error for this neural network. The error is calculated
/// using root-mean-square (RMS).
/// </summary>
/// <param name="data">The training set.</param>
/// <returns>The RMS error.</returns>
public double CalculateError(INeuralDataSet data)
{
    ClearContext();

    ErrorCalculation errorCalculation = new ErrorCalculation();

    foreach (INeuralDataPair pair in data)
    {
        INeuralData actual = Compute(pair.Input);
        errorCalculation.UpdateError(actual.Data, pair.Ideal.Data);
    }
    return errorCalculation.Calculate();
}
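// A short usage sketch: report a trained network's RMS error over a held-out
// set ("validationSet" is a hypothetical INeuralDataSet).
double rms = network.CalculateError(validationSet);
Console.WriteLine("Validation error: " + Format.FormatPercent(rms));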
/// <summary>
/// Construct an RPROP job. For more information on RPROP see the
/// ResilientPropagation class.
/// </summary>
/// <param name="network">The network to train.</param>
/// <param name="training">The training data to use.</param>
/// <param name="loadToMemory">True if binary training data should be loaded to memory.</param>
public RPROPJob(BasicNetwork network, INeuralDataSet training,
    bool loadToMemory)
    : this(network, training, loadToMemory,
        RPROPConst.DEFAULT_INITIAL_UPDATE, RPROPConst.DEFAULT_MAX_STEP,
        1, 1, 1, 1)
{
}
/// <summary>
/// Construct a resilient training object. Use the defaults for all training
/// parameters. Usually this is the constructor to use as the resilient
/// training algorithm is designed for the default parameters to be
/// acceptable for nearly all problems. Use the CPU to train.
/// </summary>
/// <param name="network">The network to train.</param>
/// <param name="training">The training set to use.</param>
public ResilientPropagation(BasicNetwork network, INeuralDataSet training)
    : this(network, training, null,
        RPROPConst.DEFAULT_INITIAL_UPDATE, RPROPConst.DEFAULT_MAX_STEP)
{
}
/// <summary>
/// Train to a specific error, using the specified training method, send the
/// output to the console.
/// </summary>
/// <param name="train">The training method.</param>
/// <param name="trainingSet">The training set to use.</param>
/// <param name="error">The desired error level.</param>
public static void TrainToError(ITrain train,
    INeuralDataSet trainingSet, double error)
{
    int epoch = 1;

    Console.WriteLine("Beginning training...");

    do
    {
        train.Iteration();

        Console.WriteLine("Iteration #" + Format.FormatInteger(epoch)
            + " Error:" + Format.FormatPercent(train.Error)
            + " Target Error: " + Format.FormatPercent(error));
        epoch++;
    } while (train.Error > error && !train.TrainingDone);
    train.FinishTraining();
}
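// A minimal usage sketch: drive any ITrain implementation down to a 1% error,
// then inspect the per-pair results with Evaluate (shown below).
ITrain trainer = new ResilientPropagation(network, trainingSet);
EncogUtility.TrainToError(trainer, trainingSet, 0.01);
EncogUtility.Evaluate(network, trainingSet);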
/// <summary>
/// Evaluate the network and display (to the console) the output for every
/// value in the training set. Displays ideal and actual.
/// </summary>
/// <param name="network">The network to evaluate.</param>
/// <param name="training">The training set to evaluate.</param>
public static void Evaluate(BasicNetwork network, INeuralDataSet training)
{
    foreach (INeuralDataPair pair in training)
    {
        INeuralData output = network.Compute(pair.Input);
        Console.WriteLine("Input="
            + EncogUtility.FormatNeuralData(pair.Input)
            + ", Actual=" + EncogUtility.FormatNeuralData(output)
            + ", Ideal=" + EncogUtility.FormatNeuralData(pair.Ideal));
    }
}