/// <summary>
/// Get the maximum, over all the data, for the specified index.
/// </summary>
///
/// <param name="index">An index into the input data.</param>
/// <returns>The maximum value.</returns>
private double GetMaxValue(int index)
{
    double result = Double.MinValue;
    long count = _set.Count;

    // Create the reusable pair once, then scan every record for the
    // largest value at the requested input column.
    IMLDataPair pair = BasicMLDataPair.CreatePair(
        _set.InputSize, _set.IdealSize);

    for (int i = 0; i < count; i++)
    {
        _set.GetRecord(i, pair);
        result = Math.Max(result, pair.InputArray[index]);
    }

    return result;
}
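The method above scans every record and keeps the largest value seen at one input column. Below is a minimal, self-contained sketch of the same column-maximum scan, using a plain jagged array in place of the IMLDataSet-backed `_set`; the array-based data and the `ColumnMaxExample` wrapper are illustration only.

using System;

internal static class ColumnMaxExample
{
    // Column-wise maximum, mirroring GetMaxValue: visit every row and
    // keep the largest value found at the given column index.
    private static double GetMaxValue(double[][] data, int index)
    {
        double result = double.MinValue;
        foreach (double[] row in data)
        {
            result = Math.Max(result, row[index]);
        }
        return result;
    }

    private static void Main()
    {
        double[][] input =
        {
            new[] { 1.0, -2.0 },
            new[] { 4.0,  0.5 },
            new[] { 3.0,  9.0 }
        };
        Console.WriteLine(GetMaxValue(input, 1)); // prints 9
    }
}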
/// <summary>
/// Calculate the SSE error.
/// </summary>
/// <returns>The SSE error with the current weights.</returns>
private double CalculateError()
{
    var result = new ErrorCalculation();

    for (int i = 0; i < _trainingLength; i++)
    {
        _indexableTraining.GetRecord(i, _pair);
        IMLData actual = _network.Compute(_pair.Input);
        result.UpdateError(actual.Data, _pair.Ideal.Data, _pair.Significance);
    }

    return result.CalculateSSE();
}
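For reference, the quantity this helper accumulates is the sum of squared differences between ideal and actual outputs. The sketch below shows that arithmetic on raw arrays; whether `ErrorCalculation.CalculateSSE` additionally applies a 1/2 factor or the significance weighting is not visible from this snippet, so treat the sketch as the basic idea rather than the exact Encog formula.

using System;

internal static class SseExample
{
    // Sum of squared errors over parallel actual/ideal arrays:
    // accumulate (ideal - actual)^2 for every output of every pattern.
    private static double Sse(double[][] actual, double[][] ideal)
    {
        double sum = 0.0;
        for (int row = 0; row < actual.Length; row++)
        {
            for (int col = 0; col < actual[row].Length; col++)
            {
                double diff = ideal[row][col] - actual[row][col];
                sum += diff * diff;
            }
        }
        return sum;
    }

    private static void Main()
    {
        var actual = new[] { new[] { 0.9 }, new[] { 0.2 } };
        var ideal  = new[] { new[] { 1.0 }, new[] { 0.0 } };
        Console.WriteLine(Sse(actual, ideal)); // 0.01 + 0.04 = 0.05
    }
}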
/// <summary>
/// Calculate the error for this neural network. The error is calculated
/// using root-mean-square (RMS).
/// </summary>
///
/// <param name="data">The training set.</param>
/// <returns>The error percentage.</returns>
public double CalculateError(IMLDataSet data)
{
    var errorCalculation = new ErrorCalculation();
    var actual = new double[_outputCount];
    IMLDataPair pair = BasicMLDataPair.CreatePair(data.InputSize, data.IdealSize);

    for (int i = 0; i < data.Count; i++)
    {
        data.GetRecord(i, pair);
        Compute(pair.InputArray, actual);
        errorCalculation.UpdateError(actual, pair.IdealArray, pair.Significance);
    }

    return errorCalculation.Calculate();
}
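`ErrorCalculation.Calculate()` reports the error in whatever mode the calculation object is configured for; the relationship between the two most common conventions, MSE and RMS, is just a square root. The standalone sketch below shows that relationship with plain arithmetic only, not the Encog class.

using System;
using System.Linq;

internal static class RmsExample
{
    private static void Main()
    {
        // Per-output residuals (ideal - actual) gathered over a data set.
        double[] residuals = { 0.1, -0.2, 0.05, 0.0 };

        // Mean squared error, then its square root (RMS).
        double mse = residuals.Select(e => e * e).Sum() / residuals.Length;
        double rms = Math.Sqrt(mse);

        Console.WriteLine($"MSE = {mse}, RMS = {rms}");
    }
}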
/// <inheritdoc/>
public void Run()
{
    _error = 0;
    EngineArray.Fill(_totDeriv, 0);
    EngineArray.Fill(_gradients, 0);

    // Loop over every training element in this worker's range
    for (int i = _low; i <= _high; i++)
    {
        _training.GetRecord(i, _pair);
        EngineArray.Fill(_derivative, 0);
        Process(_outputNeuron, _pair.InputArray, _pair.IdealArray);
    }
}
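Workers like this one walk an inclusive `[_low, _high]` slice of the training records. How those slices are assigned is not shown in this snippet; the sketch below is only one plausible way an owner could split `count` records across `workers` threads, and the `Split` helper is hypothetical.

using System;

internal static class RangeSplitExample
{
    // Split `count` records into `workers` contiguous, inclusive [low, high]
    // ranges so that each worker's Run() loop covers a disjoint slice.
    private static (int Low, int High)[] Split(int count, int workers)
    {
        var ranges = new (int, int)[workers];
        int size = count / workers;
        int remainder = count % workers;
        int next = 0;
        for (int w = 0; w < workers; w++)
        {
            int length = size + (w < remainder ? 1 : 0);
            ranges[w] = (next, next + length - 1);
            next += length;
        }
        return ranges;
    }

    private static void Main()
    {
        foreach (var (low, high) in Split(10, 3))
        {
            Console.WriteLine($"[{low}, {high}]"); // [0, 3]  [4, 6]  [7, 9]
        }
    }
}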
/// <summary>
/// Calculate the Jacobian matrix.
/// </summary>
///
/// <param name="weights">The weights for the neural network.</param>
/// <returns>Half the sum of the squared row errors.</returns>
public virtual double Calculate(double[] weights)
{
    double result = 0.0d;

    for (int i = 0; i < _inputLength; i++)
    {
        _jacobianRow = i;
        _jacobianCol = 0;

        _indexableTraining.GetRecord(i, _pair);
        double e = CalculateDerivatives(_pair);
        _rowErrors[i] = e;
        result += e * e;
    }

    return result / 2.0d;
}
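Written out, the value returned above is the half sum of squared per-row errors, the usual least-squares objective (the 1/2 cancels against the derivative of the square):

E(\mathbf{w}) = \tfrac{1}{2} \sum_{i=1}^{N} e_i^{2}

where e_i is the value CalculateDerivatives returns for training row i and N is _inputLength.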
/// <summary>
/// Perform the gradient calculation for the specified index range.
/// </summary>
public void Run()
{
    try
    {
        _errorCalculation.Reset();

        for (int i = _low; i <= _high; i++)
        {
            _training.GetRecord(i, _pair);
            Process(_pair.InputArray, _pair.IdealArray, _pair.Significance);
        }

        double error = _errorCalculation.Calculate();
        _owner.Report(_gradients, error, null);
        EngineArray.Fill(_gradients, 0);
    }
    catch (Exception ex)
    {
        _owner.Report(null, 0, ex);
    }
}
/// <summary>
/// Calculate the error for this neural network over the specified data set.
/// </summary>
/// <param name="data">The data set to evaluate.</param>
/// <returns>The calculated error.</returns>
public double CalculateError(IMLDataSet data)
{
    var calculation = new ErrorCalculation();
    var actual = new double[_outputCount];
    IMLDataPair pair = BasicMLDataPair.CreatePair(data.InputSize, data.IdealSize);

    for (long i = 0; i < data.Count; i++)
    {
        data.GetRecord(i, pair);
        Compute(pair.InputArray, actual);
        calculation.UpdateError(actual, pair.IdealArray, pair.Significance);
    }

    return calculation.Calculate();
}
/// <summary>
/// Calculate the error for the entire training set.
/// </summary>
///
/// <param name="training">Training set to use.</param>
/// <param name="deriv">Should we find the derivative.</param>
/// <returns>The error.</returns>
public double CalculateError(IMLDataSet training, bool deriv)
{
    double totErr = 0.0d;
    double diff;

    if (deriv)
    {
        int num = (_network.SeparateClass)
            ? _network.InputCount * _network.OutputCount
            : _network.InputCount;
        for (int i = 0; i < num; i++)
        {
            _network.Deriv[i] = 0.0d;
            _network.Deriv2[i] = 0.0d;
        }
    }

    _network.Exclude = (int)training.Count;

    IMLDataPair pair = BasicMLDataPair.CreatePair(
        training.InputSize, training.IdealSize);

    var xout = new double[_network.OutputCount];

    for (int r = 0; r < training.Count; r++)
    {
        training.GetRecord(r, pair);
        _network.Exclude = _network.Exclude - 1;

        double err = 0.0d;

        IMLData input = pair.Input;
        IMLData target = pair.Ideal;

        if (_network.OutputMode == PNNOutputMode.Unsupervised)
        {
            if (deriv)
            {
                IMLData output = ComputeDeriv(input, target);
                for (int z = 0; z < _network.OutputCount; z++)
                {
                    xout[z] = output[z];
                }
            }
            else
            {
                IMLData output = _network.Compute(input);
                for (int z = 0; z < _network.OutputCount; z++)
                {
                    xout[z] = output[z];
                }
            }
            for (int i = 0; i < _network.OutputCount; i++)
            {
                diff = input[i] - xout[i];
                err += diff * diff;
            }
        }
        else if (_network.OutputMode == PNNOutputMode.Classification)
        {
            var tclass = (int)target[0];
            IMLData output;

            if (deriv)
            {
                output = ComputeDeriv(input, pair.Ideal); //output_4.GetData(0); //**FIX**?
            }
            else
            {
                output = _network.Compute(input); //output_4.GetData(0); **FIX**?
            }

            xout[0] = output[0];

            for (int i = 0; i < xout.Length; i++)
            {
                if (i == tclass)
                {
                    diff = 1.0d - xout[i];
                    err += diff * diff;
                }
                else
                {
                    err += xout[i] * xout[i];
                }
            }
        }
        else if (_network.OutputMode == PNNOutputMode.Regression)
        {
            if (deriv)
            {
                IMLData output = _network.Compute(input);
                for (int z = 0; z < _network.OutputCount; z++)
                {
                    xout[z] = output[z];
                }
            }
            else
            {
                IMLData output = _network.Compute(input);
                for (int z = 0; z < _network.OutputCount; z++)
                {
                    xout[z] = output[z];
                }
            }
            for (int i = 0; i < _network.OutputCount; i++)
            {
                diff = target[i] - xout[i];
                err += diff * diff;
            }
        }

        totErr += err;
    }

    _network.Exclude = -1;

    _network.Error = totErr / training.Count;
    if (deriv)
    {
        for (int i = 0; i < _network.Deriv.Length; i++)
        {
            _network.Deriv[i] /= training.Count;
            _network.Deriv2[i] /= training.Count;
        }
    }

    if ((_network.OutputMode == PNNOutputMode.Unsupervised)
        || (_network.OutputMode == PNNOutputMode.Regression))
    {
        _network.Error = _network.Error / _network.OutputCount;
        if (deriv)
        {
            for (int i = 0; i < _network.InputCount; i++)
            {
                _network.Deriv[i] /= _network.OutputCount;
                _network.Deriv2[i] /= _network.OutputCount;
            }
        }
    }

    return _network.Error;
}
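In classification mode the per-pattern error above scores the output for the target class against 1.0 and every other output against 0.0. The standalone sketch below isolates just that scoring rule on a raw output vector; it uses plain arrays rather than the PNN types, and the `PatternError` helper is illustrative only.

using System;

internal static class ClassificationErrorExample
{
    // Per-pattern squared error from the classification branch above:
    // the target class output is compared to 1.0, all others to 0.0.
    private static double PatternError(double[] outputs, int targetClass)
    {
        double err = 0.0;
        for (int i = 0; i < outputs.Length; i++)
        {
            double diff = (i == targetClass) ? 1.0 - outputs[i] : outputs[i];
            err += diff * diff;
        }
        return err;
    }

    private static void Main()
    {
        double[] outputs = { 0.7, 0.2, 0.1 };
        Console.WriteLine(PatternError(outputs, 0)); // 0.09 + 0.04 + 0.01 = 0.14
    }
}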
/// <summary>
/// Perform one iteration.
/// </summary>
public override void Iteration()
{
    LUDecomposition decomposition = null;
    PreIteration();

    _weights = NetworkCODEC.NetworkToArray(_network);

    IComputeJacobian j = new JacobianChainRule(_network, _indexableTraining);

    double sumOfSquaredErrors = j.Calculate(_weights);
    double sumOfSquaredWeights = CalculateSumOfSquaredWeights();

    // this.setError(j.getError());
    CalculateHessian(j.Jacobian, j.RowErrors);

    // Define the objective function: the Bayesian regularization objective
    double objective = _beta * sumOfSquaredErrors + _alpha * sumOfSquaredWeights;
    double current = objective + 1.0d;

    // Start the main Levenberg-Marquardt loop
    _lambda /= ScaleLambda;

    // We'll try to find a direction with less error
    // (or where the objective function is smaller)
    while ((current >= objective) && (_lambda < LambdaMax))
    {
        _lambda *= ScaleLambda;

        // Update the diagonal (Levenberg-Marquardt formula)
        for (int i = 0; i < _parametersLength; i++)
        {
            _hessian[i][i] = _diagonal[i] + (_lambda + _alpha);
        }

        // Decompose to solve the linear system
        decomposition = new LUDecomposition(_hessianMatrix);

        // If the adjusted Hessian has become singular, increase the
        // damping factor and try again
        if (!decomposition.IsNonsingular)
        {
            continue;
        }

        // Solve using LU (or SVD) decomposition
        _deltas = decomposition.Solve(_gradient);

        // Update the weights using the calculated deltas
        sumOfSquaredWeights = UpdateWeights();

        // Calculate the new error
        sumOfSquaredErrors = 0.0d;
        for (int i = 0; i < _trainingLength; i++)
        {
            _indexableTraining.GetRecord(i, _pair);
            IMLData actual = _network.Compute(_pair.Input);
            double e = _pair.Ideal[0] - actual[0];
            sumOfSquaredErrors += e * e;
        }
        sumOfSquaredErrors /= 2.0d;

        // Update the objective function
        current = _beta * sumOfSquaredErrors + _alpha * sumOfSquaredWeights;

        // If the objective function is larger than before, the method
        // is tried again with a larger damping factor.
    }

    // If this iteration caused an error drop, then the next iteration
    // will use a smaller damping factor.
    _lambda /= ScaleLambda;

    if (_useBayesianRegularization && (decomposition != null))
    {
        // Compute the trace of the inverse Hessian
        double trace = Trace(decomposition.Inverse());

        // Poland's update formula:
        _gamma = _parametersLength - (_alpha * trace);
        _alpha = _parametersLength / (2.0d * sumOfSquaredWeights + trace);
        _beta = Math.Abs((_trainingLength - _gamma) / (2.0d * sumOfSquaredErrors));
    }

    Error = sumOfSquaredErrors;

    PostIteration();
}
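Two relations drive the loop above, both read directly from the code: the regularized objective compared before and after each trial step, and the damped diagonal used when solving for the weight deltas:

F = \beta E_D + \alpha E_W, \qquad H_{ii} \leftarrow d_i + (\lambda + \alpha), \qquad H \, \Delta\mathbf{w} = \mathbf{g}

Here E_D is the halved sum of squared errors, E_W is whatever CalculateSumOfSquaredWeights returns (its normalization is not shown in this snippet), d_i is the stored Hessian diagonal, and the deltas come from the LU solve of the damped system. A larger lambda pushes the step toward plain gradient descent; a smaller lambda approaches the Gauss-Newton step.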
/// <summary>
/// Get a record.
/// </summary>
/// <param name="index">The index.</param>
/// <param name="pair">The record.</param>
public void GetRecord(long index, IMLDataPair pair)
{
    _underlying.GetRecord(CurrentFoldOffset + index, pair);
}
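This wrapper simply shifts a fold-local index into the underlying data set. Below is a minimal sketch of that shift using a plain jagged array in place of the underlying IMLDataSet; the contiguous-fold layout implied here is an assumption, and the example types are hypothetical.

using System;

internal static class FoldOffsetExample
{
    // Read fold-local record `index` by shifting into the underlying data,
    // exactly as GetRecord does with CurrentFoldOffset.
    private static double[] GetRecord(double[][] underlying, long foldOffset, long index)
    {
        return underlying[foldOffset + index];
    }

    private static void Main()
    {
        double[][] underlying =
        {
            new[] { 0.0 }, new[] { 1.0 }, new[] { 2.0 }, new[] { 3.0 }
        };

        // With the current fold starting at offset 2, fold-local index 1
        // maps to underlying record 3.
        Console.WriteLine(GetRecord(underlying, 2, 1)[0]); // prints 3
    }
}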