/// <summary>
/// Evaluate memory. Times how many in-memory dataset reads complete within
/// a fixed window and records the scaled count as the memory score.
/// </summary>
private void EvalMemory()
{
    BasicNeuralDataSet training = RandomTrainingFactory.Generate(
        1000, 10000, 10, 10, -1, 1);

    long timeLimit = 10 * Evaluate.MILIS;
    INeuralDataPair pair = BasicNeuralDataPair.CreatePair(10, 10);

    int recordIndex = 0;
    int passes = 0;

    Stopwatch timer = new Stopwatch();
    timer.Start();

    while (timer.ElapsedMilliseconds < timeLimit)
    {
        passes++;
        training.GetRecord(recordIndex, pair);
        recordIndex++;
        // Wrap around so the whole benchmark window stays busy.
        if (recordIndex >= training.Count)
        {
            recordIndex = 0;
        }
    }

    // Scale the raw loop count down to a comparable score.
    passes /= 100000;

    this.report.Report(EncogBenchmark.STEPS, EncogBenchmark.STEP3,
        "Memory dataset, result: " + Format.FormatInteger(passes));
    this.memoryScore = passes;
}
/// <summary>
/// Construct the SVD trainer.
/// </summary>
/// <param name="network">The network to train. Must be a radial basis
/// function network with a single output neuron.</param>
/// <param name="training">The training data to use.</param>
public SVDTraining(BasicNetwork network, INeuralDataSet training)
{
    ILayer outputLayer = network.GetLayer(BasicNetwork.TAG_OUTPUT);

    if (outputLayer == null)
    {
        throw new TrainingError("SVD requires an output layer.");
    }

    if (outputLayer.NeuronCount != 1)
    {
        throw new TrainingError("SVD requires an output layer with a single neuron.");
    }

    if (network.GetLayer(RadialBasisPattern.RBF_LAYER) == null)
    {
        throw new TrainingError("SVD is only tested to work on radial basis function networks.");
    }

    rbfLayer = (RadialBasisFunctionLayer)network.GetLayer(RadialBasisPattern.RBF_LAYER);

    this.Training = training;
    this.network = network;
    // NOTE(review): this reads InputSize, while the analogous LMA constructor
    // derives its training length from the record Count — confirm this is
    // intentional and not a copy/paste slip.
    this.trainingLength = (int)this.Training.InputSize;

    // Reusable pair sized to the training set's input/ideal widths.
    BasicNeuralData input = new BasicNeuralData(this.Training.InputSize);
    BasicNeuralData ideal = new BasicNeuralData(this.Training.IdealSize);
    this.pair = new BasicNeuralDataPair(input, ideal);
}
/// <summary>
/// Not supported for this data set. Will throw an error.
/// </summary>
/// <param name="inputData">Not used.</param>
/// <exception cref="NeuralDataError">Always thrown.</exception>
public void Add(INeuralDataPair inputData)
{
#if logging
    // Log before throwing when the logging build flag is enabled.
    UnionNeuralDataSet.logger.Error(ADD_ERROR);
#endif
    throw new NeuralDataError(ADD_ERROR);
}
/// <inheritdoc/>
public void Write(double[] input, double[] ideal)
{
    // Build a pair sized to this codec and copy both arrays into it.
    INeuralDataPair pair = BasicNeuralDataPair.CreatePair(inputSize, idealSize);
    EngineArray.ArrayCopy(input, pair.Input.Data);
    EngineArray.ArrayCopy(ideal, pair.Ideal.Data);
    // NOTE(review): the populated pair is never stored or forwarded, so this
    // method currently discards the written data — confirm whether a call
    // that persists the pair (e.g. adding it to a target data set) is
    // missing here.
}
/// <summary>
/// Add input and expected output. This is used for supervised training.
/// The input portion of the pair must be image data.
/// </summary>
/// <param name="inputData">The input data to train on.</param>
/// <exception cref="NeuralNetworkError">If the input is not image data.</exception>
public override void Add(INeuralDataPair inputData)
{
    if (inputData.Input is ImageNeuralData)
    {
        base.Add(inputData);
        return;
    }

    throw new NeuralNetworkError(ImageNeuralDataSet.MUST_USE_IMAGE);
}
/// <summary>
/// Add a data pair of both input and ideal data to the buffer.
/// Only legal while the set is in loading mode.
/// </summary>
/// <param name="pair">The pair to add.</param>
/// <exception cref="NeuralDataError">If the set is not loading.</exception>
public void Add(INeuralDataPair pair)
{
    if (this.loading)
    {
        // Input values are written first, followed by the ideal values.
        this.egb.Write(pair.Input.Data);
        this.egb.Write(pair.Ideal.Data);
    }
    else
    {
        throw new NeuralDataError(BufferedNeuralDataSet.ERROR_ADD);
    }
}
/// <inheritdoc/>
public bool Read(double[] input, double[] ideal)
{
    // Stop as soon as the underlying enumerator is exhausted.
    if (!this.enumerator.MoveNext())
    {
        return false;
    }

    INeuralDataPair current = this.enumerator.Current;
    EngineArray.ArrayCopy(current.Input.Data, input);
    EngineArray.ArrayCopy(current.Ideal.Data, ideal);
    return true;
}
/// <summary>
/// Evaluate disk. Builds a binary dataset file, times how many reads from
/// it complete within a fixed window, and records the scaled count as the
/// binary score. The temporary file is deleted afterwards.
/// </summary>
private void EvalBinary()
{
    String file = "temp.egb";

    BasicNeuralDataSet training = RandomTrainingFactory.Generate(
        1000, 10000, 10, 10, -1, 1);

    // Create the binary file, replacing any leftover from a prior run.
    File.Delete(file);
    BufferedNeuralDataSet training2 = new BufferedNeuralDataSet(file);
    training2.Load(training);

    long timeLimit = 10 * Evaluate.MILIS;
    INeuralDataPair pair = BasicNeuralDataPair.CreatePair(10, 10);

    int recordIndex = 0;
    int passes = 0;

    Stopwatch timer = new Stopwatch();
    timer.Start();

    while (timer.ElapsedMilliseconds < timeLimit)
    {
        passes++;
        training2.GetRecord(recordIndex, pair);
        recordIndex++;
        // Wrap around so the whole benchmark window stays busy.
        if (recordIndex >= training2.Count)
        {
            recordIndex = 0;
        }
    }

    training2.Close();

    // Scale the raw loop count down to a comparable score.
    passes /= 100000;

    this.report.Report(EncogBenchmark.STEPS, EncogBenchmark.STEP4,
        "Disk(binary) dataset, result: " + Format.FormatInteger(passes));

    File.Delete(file);
    this.binaryScore = passes;
}
/// <summary>
/// Construct the chain rule calculation.
/// </summary>
/// <param name="network">The network to use.</param>
/// <param name="indexableTraining">The training set to use.</param>
public JacobianChainRule(BasicNetwork network, IIndexable indexableTraining)
{
    this.indexableTraining = indexableTraining;
    this.network = network;

    this.parameterSize = network.Structure.CalculateSize();
    this.inputLength = (int)this.indexableTraining.Count;

    // One Jacobian row per training record, one column per network parameter.
    this.jacobian = EngineArray.AllocateDouble2D(this.inputLength, this.parameterSize);
    this.rowErrors = new double[this.inputLength];

    // Reusable pair sized to the training set's input/ideal widths.
    this.pair = new BasicNeuralDataPair(
        new BasicNeuralData(this.indexableTraining.InputSize),
        new BasicNeuralData(this.indexableTraining.IdealSize));
}
/// <summary>
/// Construct the LMA object.
/// </summary>
/// <param name="network">The network to train. Must have a single output neuron.</param>
/// <param name="training">The training data to use. Must be indexable.</param>
/// <exception cref="TrainingError">If the training set is not indexable, or
/// the network lacks a single-neuron output layer.</exception>
public LevenbergMarquardtTraining(BasicNetwork network, INeuralDataSet training)
{
    if (!(training is IIndexable))
    {
        throw new TrainingError(
            "Levenberg Marquardt requires an indexable training set.");
    }

    ILayer outputLayer = network.GetLayer(BasicNetwork.TAG_OUTPUT);
    if (outputLayer == null)
    {
        throw new TrainingError(
            "Levenberg Marquardt requires an output layer.");
    }
    if (outputLayer.NeuronCount != 1)
    {
        throw new TrainingError(
            "Levenberg Marquardt requires an output layer with a single neuron.");
    }

    this.Training = training;
    this.indexableTraining = (IIndexable)Training;
    this.network = network;

    this.trainingLength = (int)this.indexableTraining.Count;
    this.parametersLength = this.network.Structure.CalculateSize();

    // Hessian matrix and its raw backing array.
    this.hessianMatrix = new Matrix(this.parametersLength, this.parametersLength);
    this.hessian = this.hessianMatrix.Data;

    // Initial coefficients and damping factor for the LMA iteration.
    this.alpha = 0.0;
    this.beta = 1.0;
    this.lambda = 0.1;

    this.deltas = new double[this.parametersLength];
    this.gradient = new double[this.parametersLength];
    this.diagonal = new double[this.parametersLength];

    // Reusable pair sized to the training set's input/ideal widths.
    this.pair = new BasicNeuralDataPair(
        new BasicNeuralData(this.indexableTraining.InputSize),
        new BasicNeuralData(this.indexableTraining.IdealSize));
}
/// <summary>
/// Called internally to advance to the next row.
/// </summary>
/// <returns>True if there are more rows to read.</returns>
private bool Next()
{
    // Any CSV source running out of rows ends the iteration.
    foreach (ReadCSV csv in this.readCSV)
    {
        if (!csv.Next())
        {
            return false;
        }
    }

    // Advance every data-set iterator and record its current pair in the
    // matching holder; an exhausted iterator ends the iteration.
    foreach (IEnumerator<INeuralDataPair> iterator in this.readDataSet)
    {
        if (!iterator.MoveNext())
        {
            return false;
        }
        this.dataSetIteratorMap[iterator].Pair = iterator.Current;
    }

    // Fixed-length array fields end the iteration at their last index.
    foreach (IInputField field in this.inputFields)
    {
        IHasFixedLength fixedLength = field as IHasFixedLength;
        if (fixedLength != null && (this.currentIndex + 1) >= fixedLength.Length)
        {
            return false;
        }
    }

    this.currentIndex++;
    return true;
}
/// <summary>
/// Called internally to obtain the current value for an input field, and
/// cache it on the field's CurrentValue property.
/// </summary>
/// <param name="field">The input field to determine.</param>
/// <param name="index">The current index.</param>
/// <returns>The value for this input field.</returns>
private double DetermineInputFieldValue(IInputField field, int index)
{
    double result;

    if (field is InputFieldCSV)
    {
        // CSV-backed field: read the column at the field's offset.
        InputFieldCSV csvField = (InputFieldCSV)field;
        result = this.csvMap[field].GetDouble(csvField.Offset);
    }
    else if (field is InputFieldNeuralDataSet)
    {
        // Data-set-backed field: the offset addresses the input vector
        // first, then continues into the ideal vector.
        InputFieldNeuralDataSet neuralField = (InputFieldNeuralDataSet)field;
        INeuralDataPair pair = this.dataSetFieldMap[field].Pair;
        int offset = neuralField.Offset;
        result = offset < pair.Input.Count
            ? pair.Input[offset]
            : pair.Ideal[offset - pair.Input.Count];
    }
    else
    {
        // Everything else supplies its own value for the index.
        result = field.GetValue(index);
    }

    field.CurrentValue = result;
    return result;
}
/// <summary>
/// Handle reading an item tag, adding the resulting pair to the current
/// data set. An item with an ideal property is supervised; otherwise it is
/// unsupervised.
/// </summary>
/// <param name="xmlIn">The XML reader.</param>
private void HandleItem(ReadXML xmlIn)
{
    IDictionary<String, String> properties = xmlIn.ReadPropertyBlock();

    INeuralData input = new BasicNeuralData(NumberList.FromList(
        CSVFormat.EG_FORMAT,
        properties[BasicNeuralDataSetPersistor.TAG_INPUT]));

    INeuralDataPair pair;
    if (properties.ContainsKey(BasicNeuralDataSetPersistor.TAG_IDEAL))
    {
        // Supervised: an ideal vector accompanies the input.
        INeuralData ideal = new BasicNeuralData(NumberList.FromList(
            CSVFormat.EG_FORMAT,
            properties[BasicNeuralDataSetPersistor.TAG_IDEAL]));
        pair = new BasicNeuralDataPair(input, ideal);
    }
    else
    {
        // Unsupervised: input only.
        pair = new BasicNeuralDataPair(input);
    }

    this.currentDataSet.Add(pair);
}
/// <summary>
/// Calculate the derivatives for this training set element, filling in one
/// row of the Jacobian matrix. this.jacobianRow / this.jacobianCol track
/// the current cell and the column index is advanced as values are written.
/// Dead debug locals left in the inner loops of the original (x, y, x1, x2,
/// x3, z1, z2) have been removed; the computed values are unchanged.
/// </summary>
/// <param name="pair">The training set element.</param>
/// <returns>The error (ideal minus actual) for the single output neuron.</returns>
private double CalculateDerivatives(INeuralDataPair pair)
{
    // error values
    double e = 0.0;
    double sum = 0.0;

    IActivationFunction function = this.network.GetLayer(
        BasicNetwork.TAG_INPUT).ActivationFunction;

    NeuralOutputHolder holder = new NeuralOutputHolder();
    this.network.Compute(pair.Input, holder);

    IList<ISynapse> synapses = this.network.Structure.Synapses;

    int synapseNumber = 0;
    ISynapse synapse = synapses[synapseNumber++];

    // Single output neuron: error against the first ideal value.
    double output = holder.Output[0];
    e = pair.Ideal[0] - output;

    // First column for the output neuron's bias derivative.
    this.jacobian[this.jacobianRow][this.jacobianCol++] = CalcDerivative(
        function, output);

    // One column per connection feeding the output neuron.
    for (int i = 0; i < synapse.FromNeuronCount; i++)
    {
        double lastOutput = holder.Result[synapse][i];
        this.jacobian[this.jacobianRow][this.jacobianCol++] = CalcDerivative(
            function, output) * lastOutput;
    }

    ISynapse lastSynapse;

    while (synapseNumber < synapses.Count)
    {
        lastSynapse = synapse;
        synapse = synapses[synapseNumber++];
        INeuralData outputData = holder.Result[lastSynapse];

        // Reserve the bias columns for this layer ahead of the weight columns.
        int biasCol = this.jacobianCol;
        this.jacobianCol += synapse.ToLayer.NeuronCount;

        // for each neuron in the input layer
        for (int neuron = 0; neuron < synapse.ToNeuronCount; neuron++)
        {
            output = outputData[neuron];

            // for each weight of the input neuron
            for (int i = 0; i < synapse.FromNeuronCount; i++)
            {
                sum = 0.0;

                // for each neuron in the next layer
                for (int j = 0; j < lastSynapse.ToNeuronCount; j++)
                {
                    // for each weight of the next neuron
                    for (int k = 0; k < lastSynapse.FromNeuronCount; k++)
                    {
                        sum += lastSynapse.WeightMatrix[k, j] * output;
                    }
                    sum += lastSynapse.ToLayer.BiasWeights[j];
                }

                // NOTE(review): the weight is always read from column 0 of
                // the matrix — confirm this is intended for layers with more
                // than one downstream neuron.
                double w = lastSynapse.WeightMatrix[neuron, 0];
                double val = CalcDerivative(function, output)
                    * CalcDerivative2(function, sum) * w;

                this.jacobian[this.jacobianRow][this.jacobianCol++] =
                    val * holder.Result[synapse][i];
                this.jacobian[this.jacobianRow][biasCol + neuron] = val;
            }
        }
    }

    // return error
    return e;
}
/// <summary>
/// Obtain the next pair: advance the iterator and cache its current
/// element in this.pair.
/// </summary>
public void ObtainPair()
{
    // NOTE(review): the MoveNext() result is ignored; if the iterator is
    // exhausted, Current (and therefore this.pair) is stale or undefined —
    // confirm callers guarantee another element exists.
    this.iterator.MoveNext();
    this.pair = this.iterator.Current;
}
/// <summary>
/// Construct the SVD trainer.
/// </summary>
/// <param name="network">The network to train. Must be a radial basis
/// function network with a single output neuron.</param>
/// <param name="training">The training data to use.</param>
public SVDTraining(BasicNetwork network, INeuralDataSet training)
{
    ILayer outputLayer = network.GetLayer(BasicNetwork.TAG_OUTPUT);

    if (outputLayer == null)
    {
        throw new TrainingError("SVD requires an output layer.");
    }

    if (outputLayer.NeuronCount != 1)
    {
        throw new TrainingError("SVD requires an output layer with a single neuron.");
    }

    if (network.GetLayer(RadialBasisPattern.RBF_LAYER) == null)
        throw new TrainingError("SVD is only tested to work on radial basis function networks.");

    rbfLayer = (RadialBasisFunctionLayer)network.GetLayer(RadialBasisPattern.RBF_LAYER);

    this.Training = training;
    this.network = network;
    // NOTE(review): this reads InputSize, while the analogous LMA constructor
    // derives its training length from the record Count — confirm this is
    // intentional and not a copy/paste slip.
    this.trainingLength = (int)this.Training.InputSize;

    // Reusable pair sized to the training set's input/ideal widths.
    BasicNeuralData input = new BasicNeuralData(this.Training.InputSize);
    BasicNeuralData ideal = new BasicNeuralData(this.Training.IdealSize);
    this.pair = new BasicNeuralDataPair(input, ideal);
}
/// <summary>
/// Not supported for a folded data set. Always throws.
/// </summary>
/// <param name="inputData">Not used.</param>
/// <exception cref="TrainingError">Always thrown.</exception>
public void Add(INeuralDataPair inputData)
{
    throw new TrainingError(FoldedDataSet.ADD_NOT_SUPPORTED);
}
/* Event handler for the Start button click. Validates the parameters and,
 * if they are correct, continues. It then prepares the training set,
 * creates the network, and finally runs the computation. */
private void Start_Click(object sender, RoutedEventArgs e)
{
    errors = new List<double>();
    if (ValidateIntput() == false)
    {
        return;
    }

    INeuralDataSet learningSet, trainingSet;

    learningSet = CombineTrainingSet(neuralInput, neuralIdeal);
    trainingSet = CombineTrainingSet(neuralTestInput, neuralAnswers);

    ITrain learning = CreateNeuronNetwork(learningSet);

    int iteracja = 0;
    INeuralData data = null;
    INeuralDataPair dataPair = null;

    // Grab the first pair (and its input) from the learning set; it is used
    // below only for per-epoch console logging.
    foreach (INeuralDataPair pair in learningSet)
    {
        dataPair = pair;
        data = pair.Input;
        break;
    }

    INeuralData tmp = null;

    // Train until the iteration limit is hit or the error drops below the
    // 0.0005 threshold.
    do
    {
        learning.Iteration();
        Console.WriteLine("OUTPUT IDEALNY: " + dataPair.Ideal);
        tmp = learning.Network.Compute(data);
        Console.WriteLine("WARTOŚĆ OFICJALNA:" + tmp.Data[0]);
        Console.WriteLine("Epoch #" + iteracja + " Error:" + learning.Error);
        errors.Add(learning.Error);
        iteracja++;
    } while ((iteracja < nHelp.liczbaIteracji) && (learning.Error > 0.0005));

    // THE LEARNING PROCESS HAS FINISHED HERE.
    // THE TEST SET SHOULD NOW BE APPLIED TO THIS NETWORK
    // AND THE GRAPHS DRAWN.

    int i = 0;

    Console.WriteLine("Neural Network Results:");

    // Map each test output to an answer: for classification (index 0) a
    // 0.6 threshold picks the class label, otherwise the raw output is kept
    // as a regression value.
    foreach (INeuralDataPair pair in trainingSet)
    {
        INeuralData output = learning.Network.Compute(pair.Input);
        if (CBProblem.SelectedIndex == 0)
        {
            if (sety == 4)
            {
                if ((double)(output[0]) >= 0.6)
                {
                    neuralAnswer[i] = 1.0;
                }
                else if ((double)(output[1]) >= 0.6)
                {
                    neuralAnswer[i] = 2.0;
                }
                else if ((double)(output[2]) >= 0.6)
                {
                    neuralAnswer[i] = 3.0;
                }
                else
                {
                    neuralAnswer[i] = 4.0;
                }
            }
            else if (sety == 3)
            {
                if ((double)(output[0]) >= 0.6)
                {
                    neuralAnswer[i] = 1.0;
                }
                else if ((double)(output[1]) >= 0.6)
                {
                    neuralAnswer[i] = 2.0;
                }
                else
                {
                    neuralAnswer[i] = 3.0;
                }
            }
        }
        else
        {
            neuralAnswer[i] = output[0];
        }
        i++;
    }

    Console.WriteLine("Calculated");
    CreateErrorFile();

    if (CBProblem.SelectedIndex == 0)
    {
        CreateClassificationFile();
    }
    else
    {
        CreateRegressionFile();
    }
}
/// <summary>
/// Adding directly is not supported. Rather, add temporal points and
/// generate the training data. Always throws.
/// </summary>
/// <param name="inputData">Not used.</param>
/// <exception cref="TemporalError">Always thrown.</exception>
public override void Add(INeuralDataPair inputData)
{
    throw new TemporalError(TemporalNeuralDataSet.ADD_NOT_SUPPORTED);
}
/// <summary>
/// Calculate the derivatives for this training set element, filling in one
/// row of the Jacobian matrix. this.jacobianRow / this.jacobianCol track
/// the current cell and the column index is advanced as values are written.
/// Dead debug locals left in the inner loops of the original (x, y, x1, x2,
/// x3, z1, z2) have been removed; the computed values are unchanged.
/// </summary>
/// <param name="pair">The training set element.</param>
/// <returns>The error (ideal minus actual) for the single output neuron.</returns>
private double CalculateDerivatives(INeuralDataPair pair)
{
    // error values
    double e = 0.0;
    double sum = 0.0;

    IActivationFunction function = this.network.GetLayer(
        BasicNetwork.TAG_INPUT).ActivationFunction;

    NeuralOutputHolder holder = new NeuralOutputHolder();
    this.network.Compute(pair.Input, holder);

    IList<ISynapse> synapses = this.network.Structure.Synapses;

    int synapseNumber = 0;
    ISynapse synapse = synapses[synapseNumber++];

    // Single output neuron: error against the first ideal value.
    double output = holder.Output[0];
    e = pair.Ideal[0] - output;

    // First column for the output neuron's bias derivative.
    this.jacobian[this.jacobianRow][this.jacobianCol++] = CalcDerivative(
        function, output);

    // One column per connection feeding the output neuron.
    for (int i = 0; i < synapse.FromNeuronCount; i++)
    {
        double lastOutput = holder.Result[synapse][i];
        this.jacobian[this.jacobianRow][this.jacobianCol++] = CalcDerivative(
            function, output) * lastOutput;
    }

    ISynapse lastSynapse;

    while (synapseNumber < synapses.Count)
    {
        lastSynapse = synapse;
        synapse = synapses[synapseNumber++];
        INeuralData outputData = holder.Result[lastSynapse];

        // Reserve the bias columns for this layer ahead of the weight columns.
        int biasCol = this.jacobianCol;
        this.jacobianCol += synapse.ToLayer.NeuronCount;

        // for each neuron in the input layer
        for (int neuron = 0; neuron < synapse.ToNeuronCount; neuron++)
        {
            output = outputData[neuron];

            // for each weight of the input neuron
            for (int i = 0; i < synapse.FromNeuronCount; i++)
            {
                sum = 0.0;

                // for each neuron in the next layer
                for (int j = 0; j < lastSynapse.ToNeuronCount; j++)
                {
                    // for each weight of the next neuron
                    for (int k = 0; k < lastSynapse.FromNeuronCount; k++)
                    {
                        sum += lastSynapse.WeightMatrix[k, j] * output;
                    }
                    sum += lastSynapse.ToLayer.BiasWeights[j];
                }

                // NOTE(review): the weight is always read from column 0 of
                // the matrix — confirm this is intended for layers with more
                // than one downstream neuron.
                double w = lastSynapse.WeightMatrix[neuron, 0];
                double val = CalcDerivative(function, output)
                    * CalcDerivative2(function, sum) * w;

                this.jacobian[this.jacobianRow][this.jacobianCol++] =
                    val * holder.Result[synapse][i];
                this.jacobian[this.jacobianRow][biasCol + neuron] = val;
            }
        }
    }

    // return error
    return e;
}