/// <summary>
/// Evaluate the error for the specified model.
/// </summary>
///
/// <param name="param">The parameters for the SVM.</param>
/// <param name="prob">The problem to evaluate.</param>
/// <param name="target">The output values from the SVM.</param>
/// <returns>The calculated error.</returns>
private static double Evaluate(svm_parameter param, svm_problem prob,
                               double[] target)
{
    int totalCorrect = 0;
    var error = new ErrorCalculation();

    if ((param.svm_type == svm_parameter.EPSILON_SVR)
        || (param.svm_type == svm_parameter.NU_SVR))
    {
        for (int i = 0; i < prob.l; i++)
        {
            double ideal = prob.y[i];
            double actual = target[i];
            error.UpdateError(actual, ideal);
        }
        return error.Calculate();
    }

    for (int i = 0; i < prob.l; i++)
    {
        if (target[i] == prob.y[i])
        {
            ++totalCorrect;
        }
    }

    return Format.HundredPercent * totalCorrect / prob.l;
}
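All of the snippets in this collection follow the same accumulate-then-calculate pattern: UpdateError is called once per sample (or per output vector) and a final Calculate, CalculateRMS, or CalculateSSE call reduces the accumulated values to a single figure. A minimal standalone sketch of that pattern, using only the calls that appear in the surrounding snippets (the ideal/actual arrays are invented sample data, and Calculate() returns the error in whatever mode the library is configured for):

// Minimal sketch of the accumulate-then-calculate usage pattern.
// The ideal/actual values here are made-up data for illustration only.
var error = new ErrorCalculation();

double[][] ideal  = { new[] { 0.0, 1.0 }, new[] { 1.0, 0.0 } };
double[][] actual = { new[] { 0.1, 0.9 }, new[] { 0.8, 0.2 } };

for (int i = 0; i < ideal.Length; i++)
{
    error.UpdateError(actual[i], ideal[i]);   // accumulate one sample
}

double overallError = error.Calculate();      // reduce to a single error value
Console.WriteLine(overallError);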
/// <inheritdoc />
public override sealed void Iteration()
{
    if (_mustInit)
    {
        InitWeight();
    }

    var error = new ErrorCalculation();

    foreach (IMLDataPair pair in _training)
    {
        IMLData xout = _network.ComputeInstar(pair.Input);
        int j = EngineArray.IndexOfLargest(xout);

        for (int i = 0; i < _network.OutstarCount; i++)
        {
            double delta = _learningRate
                           * (pair.Ideal[i] - _network.WeightsInstarToOutstar[j, i]);
            _network.WeightsInstarToOutstar.Add(j, i, delta);
        }

        IMLData out2 = _network.ComputeOutstar(xout);
        error.UpdateError(out2, pair.Ideal, pair.Significance);
    }

    Error = error.Calculate();
}
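The update inside the inner loop above is the outstar delta rule: only the weight row of the winning instar neuron j is moved toward the ideal vector, by a fraction given by the learning rate. Pulled out as a tiny helper for clarity (the names here are illustrative, not part of the Encog API):

// Outstar delta rule, extracted for clarity. 'weights[j, i]' stands in for the
// instar-to-outstar weight matrix; only the winning row j is updated.
static void UpdateOutstarRow(double[,] weights, int j, double[] ideal, double learningRate)
{
    for (int i = 0; i < ideal.Length; i++)
    {
        weights[j, i] += learningRate * (ideal[i] - weights[j, i]);
    }
}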
/// <summary>
/// Evaluate the error for the specified model.
/// </summary>
/// <param name="param">The parameters for the SVM.</param>
/// <param name="prob">The problem to evaluate.</param>
/// <param name="target">The output values from the SVM.</param>
/// <returns>The calculated error.</returns>
private double Evaluate(svm_parameter param, svm_problem prob, double[] target)
{
    int totalCorrect = 0;
    ErrorCalculation error = new ErrorCalculation();

    if (param.svm_type == svm_parameter.EPSILON_SVR
        || param.svm_type == svm_parameter.NU_SVR)
    {
        for (int i = 0; i < prob.l; i++)
        {
            double ideal = prob.y[i];
            double actual = target[i];
            error.UpdateError(actual, ideal);
        }
        return error.Calculate();
    }
    else
    {
        for (int i = 0; i < prob.l; i++)
        {
            if (target[i] == prob.y[i])
            {
                ++totalCorrect;
            }
        }
        return 100.0 * totalCorrect / prob.l;
    }
}
/// <summary>
/// Perform one training iteration.
/// </summary>
public override void Iteration()
{
    if (this.mustInit)
    {
        InitWeight();
    }

    ErrorCalculation error = new ErrorCalculation();

    foreach (INeuralDataPair pair in this.training)
    {
        INeuralData output = this.parts.InstarSynapse.Compute(pair.Input);
        int j = this.parts.Winner(output);

        for (int i = 0; i < this.parts.OutstarLayer.NeuronCount; i++)
        {
            double delta = this.learningRate
                           * (pair.Ideal[i] - this.parts.OutstarSynapse.WeightMatrix[j, i]);
            this.parts.OutstarSynapse.WeightMatrix.Add(j, i, delta);
        }

        error.UpdateError(output.Data, pair.Ideal.Data);
    }

    this.Error = error.Calculate();
}
public List<PredictionResults> Predict(DateTime predictFrom, DateTime predictTo)
{
    List<PredictionResults> results = new List<PredictionResults>();
    double[] present = new double[InputTuples * IndexesToConsider];
    double[] actualOutput = new double[OutputSize];
    int index = 0;

    foreach (var sample in _manager.Samples)
    {
        if (sample.Date.CompareTo(predictFrom) > 0 && sample.Date.CompareTo(predictTo) < 0)
        {
            var result = new PredictionResults();
            _manager.GetInputData(index - InputTuples, present);
            _manager.GetOutputData(index - InputTuples, actualOutput);

            var data = new BasicNeuralData(present);
            var predict = _network.Compute(data);

            result.ActualLotos = actualOutput[0] * (_manager.MaxLotos - _manager.MinLotos) + _manager.MinLotos;
            result.PredictedLotos = predict[0] * (_manager.MaxLotos - _manager.MinLotos) + _manager.MinLotos;
            result.ActualPir = actualOutput[1] * (_manager.MaxPrimeRate - _manager.MinPrimeRate) + _manager.MinPrimeRate;
            result.PredictedPir = predict[1] * (_manager.MaxPrimeRate - _manager.MinPrimeRate) + _manager.MinPrimeRate;
            result.ActualOrlen = actualOutput[2] * (_manager.MaxOrlen - _manager.MinOrlen) + _manager.MinOrlen;
            result.PredictedOrlen = predict[2] * (_manager.MaxOrlen - _manager.MinOrlen) + _manager.MinOrlen;
            result.Date = sample.Date;

            var error = new ErrorCalculation();
            error.UpdateError(actualOutput, predict.Data);
            result.Error = error.CalculateRMS();

            results.Add(result);
        }
        index++;
    }

    return results;
}
/// <summary>
/// Construct a gradient worker.
/// </summary>
///
/// <param name="network">The network to train.</param>
/// <param name="owner">The owner that is doing the training.</param>
/// <param name="training">The training data.</param>
/// <param name="low">The low index to use in the training data.</param>
/// <param name="high">The high index to use in the training data.</param>
public GradientWorkerCPU(FlatNetwork network, TrainFlatNetworkProp owner,
                         IEngineIndexableSet training, int low, int high)
{
    this.errorCalculation = new ErrorCalculation();
    this.network = network;
    this.training = training;
    this.low = low;
    this.high = high;
    this.owner = owner;
    this.stopwatch = new Stopwatch();

    this.layerDelta = new double[network.LayerOutput.Length];
    this.gradients = new double[network.Weights.Length];
    this.actual = new double[network.OutputCount];

    this.weights = network.Weights;
    this.layerIndex = network.LayerIndex;
    this.layerCounts = network.LayerCounts;
    this.weightIndex = network.WeightIndex;
    this.layerOutput = network.LayerOutput;
    this.layerFeedCounts = network.LayerFeedCounts;

    this.pair = BasicEngineData.CreatePair(network.InputCount, network.OutputCount);
}
/// <summary>
/// Called internally to compute each output neuron.
/// </summary>
/// <param name="outputNeuron">The output neuron to compute.</param>
private void InternalCompute(int outputNeuron)
{
    int row = 0;
    var error = new ErrorCalculation();
    var derivative = new double[_weightCount];

    // Loop over every training element
    foreach (IMLDataPair pair in _training)
    {
        EngineArray.Fill(derivative, 0);
        IMLData networkOutput = _network.Compute(pair.Input);
        double e = pair.Ideal[outputNeuron] - networkOutput[outputNeuron];
        error.UpdateError(networkOutput[outputNeuron], pair.Ideal[outputNeuron]);

        int currentWeight = 0;

        // loop over the output weights
        int outputFeedCount = _network.GetLayerTotalNeuronCount(_network.LayerCount - 2);
        for (int i = 0; i < _network.OutputCount; i++)
        {
            for (int j = 0; j < outputFeedCount; j++)
            {
                double jc;

                if (i == outputNeuron)
                {
                    jc = ComputeDerivative(pair.Input, outputNeuron,
                                           currentWeight, _dStep,
                                           networkOutput[outputNeuron], row);
                }
                else
                {
                    jc = 0;
                }

                _gradients[currentWeight] += jc * e;
                derivative[currentWeight] = jc;
                currentWeight++;
            }
        }

        // Loop over every weight in the neural network
        while (currentWeight < _network.Flat.Weights.Length)
        {
            double jc = ComputeDerivative(pair.Input, outputNeuron,
                                          currentWeight, _dStep,
                                          networkOutput[outputNeuron], row);
            derivative[currentWeight] = jc;
            _gradients[currentWeight] += jc * e;
            currentWeight++;
        }

        row++;
        UpdateHessian(derivative);
    }

    _sse += error.CalculateSSE();
}
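ComputeDerivative itself is not shown in these snippets; routines of this kind generally estimate the partial derivative of one output with respect to one weight by perturbing that weight and re-running the network. A generic central-difference sketch under that assumption (illustrative only, not the project's ComputeDerivative; it reuses only members that appear above, such as network.Flat.Weights and Compute):

// Generic central-difference estimate of d(output)/d(weight).
// Illustrative sketch only: perturb one flat weight by +/- step, re-run the
// network, and restore the weight afterwards.
static double CentralDifference(BasicNetwork network, IMLData input,
                                int outputNeuron, int weightIndex, double step)
{
    double[] weights = network.Flat.Weights;
    double saved = weights[weightIndex];

    weights[weightIndex] = saved + step;
    double plus = network.Compute(input)[outputNeuron];

    weights[weightIndex] = saved - step;
    double minus = network.Compute(input)[outputNeuron];

    weights[weightIndex] = saved;                 // restore the original weight
    return (plus - minus) / (2.0 * step);
}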
/// <summary>
/// Process training for pure batch mode (one single batch).
/// </summary>
protected void ProcessPureBatch()
{
    var errorCalc = new ErrorCalculation();
    _visited.Clear();

    foreach (IMLDataPair pair in _training)
    {
        var input = pair.Input;
        var ideal = pair.Ideal;
        var actual = _network.Compute(input);
        var sig = pair.Significance;

        errorCalc.UpdateError(actual, ideal, sig);

        for (int i = 0; i < _network.OutputCount; i++)
        {
            var diff = (ideal[i] - actual[i]) * sig;
            IFreeformNeuron neuron = _network.OutputLayer.Neurons[i];
            CalculateOutputDelta(neuron, diff);
            CalculateNeuronGradient(neuron);
        }
    }

    // Set the overall error.
    Error = errorCalc.Calculate();

    // Learn for all data.
    Learn();
}
/// <summary>
/// Construct a gradient worker.
/// </summary>
///
/// <param name="theNetwork">The network to train.</param>
/// <param name="theOwner">The owner that is doing the training.</param>
/// <param name="theTraining">The training data.</param>
/// <param name="theLow">The low index to use in the training data.</param>
/// <param name="theHigh">The high index to use in the training data.</param>
/// <param name="theFlatSpots">Holds an array of flat spot constants.</param>
/// <param name="ef">The error function to use.</param>
public GradientWorker(FlatNetwork theNetwork, Propagation theOwner,
                      IMLDataSet theTraining, int theLow, int theHigh,
                      double[] theFlatSpots, IErrorFunction ef)
{
    _errorCalculation = new ErrorCalculation();
    _network = theNetwork;
    _training = theTraining;
    _low = theLow;
    _high = theHigh;
    _owner = theOwner;
    _flatSpot = theFlatSpots;

    _layerDelta = new double[_network.LayerOutput.Length];
    _gradients = new double[_network.Weights.Length];
    _actual = new double[_network.OutputCount];

    _weights = _network.Weights;
    _layerIndex = _network.LayerIndex;
    _layerCounts = _network.LayerCounts;
    _weightIndex = _network.WeightIndex;
    _layerOutput = _network.LayerOutput;
    _layerSums = _network.LayerSums;
    _layerFeedCounts = _network.LayerFeedCounts;

    _ef = ef;
}
public static void CompareMCYT(string aFolder, int aNrOfTrainingSamples,
                               DTWConfiguration aDTWConfig, ref StreamWriter aSWriter)
{
    List<Signature> lSignatures = SignatureFileUtils.GetAllSignaturesFromFolder(aFolder);
    List<Signature> lTemplate = lSignatures.Skip(25).Take(aNrOfTrainingSamples).ToList();
    List<Signature> lOriginalSignatures = lSignatures.Skip(25 + aNrOfTrainingSamples).Take(25 - aNrOfTrainingSamples).ToList();
    List<Signature> lImpostorSignatures = lSignatures.Take(15).ToList();

    for (int i = 0; i < lTemplate.Count; ++i)
    {
        var lElement = lTemplate.ElementAt(i);
        var lNewElement = SignatureUtils.SignatureUtils.CalculateCharacteristics(lElement);
        lNewElement = SignatureUtils.SignatureUtils.StandardizeSignature(lNewElement);
        lTemplate.RemoveAt(i);
        lTemplate.Insert(i, lNewElement);
    }

    for (int i = 0; i < lOriginalSignatures.Count; ++i)
    {
        var lElement = lOriginalSignatures.ElementAt(i);
        var lNewElement = SignatureUtils.SignatureUtils.CalculateCharacteristics(lElement);
        lNewElement = SignatureUtils.SignatureUtils.StandardizeSignature(lNewElement);
        lOriginalSignatures.RemoveAt(i);
        lOriginalSignatures.Insert(i, lNewElement);
    }

    for (int i = 0; i < lImpostorSignatures.Count; ++i)
    {
        var lElement = lImpostorSignatures.ElementAt(i);
        var lNewElement = SignatureUtils.SignatureUtils.CalculateCharacteristics(lElement);
        lNewElement = SignatureUtils.SignatureUtils.StandardizeSignature(lNewElement);
        lImpostorSignatures.RemoveAt(i);
        lImpostorSignatures.Insert(i, lNewElement);
    }

    List<double> lOriginalScores = SignatureUtils.SignatureUtils.CompareSignaturesDTW(lTemplate, lOriginalSignatures, aDTWConfig);
    List<double> lImpostorScores = SignatureUtils.SignatureUtils.CompareSignaturesDTW(lTemplate, lImpostorSignatures, aDTWConfig);

    ErrorCalculation lError = ErrorCalculationFactory.GetDScoreErrorCalculator();
    lError.CalculateErrors(lOriginalScores, lImpostorScores, 100);

    //var lFARList = lError.GetFARList();
    //var lFRRList = lError.GetFRRList();
    //var lTresholdList = lError.GetThresholdList();
    //for (int i = 0; i < lFARList.Count; ++i)
    //{
    //    aSWriter.WriteLine(lFARList.ElementAt(i) + "," + lFRRList.ElementAt(i) + ", " + lTresholdList.ElementAt(i));
    //}

    aSWriter.WriteLine(lError.GetERR());
}
public override sealed void Iteration()
{
    // Decompiled (obfuscated) variant of the CPN Iteration method above,
    // with the goto-based control flow straightened out; behavior unchanged.
    if (this._x268cb8b20222b0dc)
    {
        this.xabfa4e7d76a2422c();
    }

    ErrorCalculation calculation = new ErrorCalculation();

    foreach (IMLDataPair pair in this._x823a2b9c8bf459c5)
    {
        IMLData data = this._x87a7fc6a72741c2e.ComputeInstar(pair.Input);
        int num = EngineArray.IndexOfLargest(data.Data);

        for (int num2 = 0; num2 < this._x87a7fc6a72741c2e.OutstarCount; num2++)
        {
            double num3 = this._x9b481c22b6706459
                          * (pair.Ideal[num2] - this._x87a7fc6a72741c2e.WeightsInstarToOutstar[num, num2]);
            this._x87a7fc6a72741c2e.WeightsInstarToOutstar.Add(num, num2, num3);
        }

        IMLData data2 = this._x87a7fc6a72741c2e.ComputeOutstar(data);
        calculation.UpdateError(data2.Data, pair.Ideal.Data, pair.Significance);
    }

    this.Error = calculation.Calculate();
}
public void validate_bidirectional_CAW_RDiffuse()
{
    var analyticSolution = BidirectionalAnalyticSolutions.GetBidirectionalRadianceInSlab(
        _slabThickness,
        new OpticalProperties(_mua, _musp, _g, 1.0),
        -1, // direction -1=up
        0); // position at surface
    var sd = ErrorCalculation.StandardDeviation(_output.Input.N, _output.Rd, _output.Rd2);
    Assert.Less(Math.Abs(_output.Rd - analyticSolution), 3 * sd);
}
/// <summary>
/// Calculate the error for this neural network. The error is calculated
/// using root mean square (RMS).
/// </summary>
/// <param name="input">The input patterns.</param>
/// <param name="ideal">The output patterns.</param>
/// <returns>The RMS error.</returns>
public double CalculateError(double[][] input, double[][] ideal)
{
    ErrorCalculation errorCalc = new ErrorCalculation();

    for (int i = 0; i < ideal.Length; i++)
    {
        ComputeOutputs(input[i]);
        errorCalc.UpdateError(_outputLayer.Fire, ideal[i]);
    }

    return errorCalc.RootMeanSquare();
}
public void validate_bidirectional_analog_TDiffuse()
{
    var analyticSolution = BidirectionalAnalyticSolutions.GetBidirectionalRadianceInSlab(
        _slabThickness,
        new OpticalProperties(_mua, _musp, _g, 1.0),
        1, // direction 1=down
        _slabThickness); // position at slab end
    var sd = ErrorCalculation.StandardDeviation(_output.Input.N, _output.Td, _output.Td2);
    Assert.Less(Math.Abs(_output.Td - analyticSolution), 3 * sd);
}
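These Monte Carlo validation tests compare a tallied mean against an analytic solution within three standard deviations. Assuming that this library's ErrorCalculation.StandardDeviation takes the photon count N, the tallied first moment (mean), and the tallied second moment, the quantity it returns is presumably the standard deviation of the mean, which a direct sketch would compute roughly as:

// Illustrative sketch only: standard deviation of a tallied mean from the
// first moment (mean) and second moment over N independent samples.
// This is an assumption about what ErrorCalculation.StandardDeviation returns,
// not its actual implementation.
static double StandardDeviationOfMean(long n, double firstMoment, double secondMoment)
{
    double variance = (secondMoment - firstMoment * firstMoment) / n;
    return Math.Sqrt(Math.Max(variance, 0.0));
}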
public void validate_CAW_RDiffuse()
{
    var sdOneLayerTissue = ErrorCalculation.StandardDeviation(
        _outputOneLayerTissue.Input.N, _outputOneLayerTissue.Rd, _outputOneLayerTissue.Rd2);
    var sdTwoLayerTissue = ErrorCalculation.StandardDeviation(
        _outputTwoLayerTissue.Input.N, _outputTwoLayerTissue.Rd, _outputTwoLayerTissue.Rd2);

    Assert.Less(Math.Abs(_outputOneLayerTissue.Rd * _factor - 0.572710099), 0.000000001);

    // figure out best check of two below
    Assert.Less(Math.Abs(_outputTwoLayerTissue.Rd * _factor - 0.572710099), 1 * sdOneLayerTissue);
    Assert.Less(Math.Abs(_outputTwoLayerTissue.Rd * _factor - 0.572710099), 0.000000001);
}
/// <summary>
/// Calculate the error for this neural network. The error is calculated
/// using root mean square (RMS).
/// </summary>
/// <param name="input">Input patterns.</param>
/// <param name="ideal">Ideal patterns.</param>
/// <returns>The RMS error.</returns>
public double CalculateError(double[][] input, double[][] ideal)
{
    ErrorCalculation errorCalculation = new ErrorCalculation();

    for (int i = 0; i < ideal.Length; i++)
    {
        ComputeOutputs(input[i]);
        errorCalculation.UpdateError(this.outputLayer.Fire, ideal[i]);
    }

    return errorCalculation.CalculateRMS();
}
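For reference, the error figures used throughout these snippets are all derived from the same accumulated squared differences: SSE is the raw sum, MSE divides by the number of values, and RMS is the square root of MSE. A hand-rolled sketch of the three over invented data (this is the standard definition, not the ErrorCalculation internals):

// Hand-rolled SSE / MSE / RMS over one ideal-vs-actual pair of arrays,
// purely to illustrate what the ErrorCalculation reductions represent.
double[] ideal  = { 1.0, 0.0, 1.0 };
double[] actual = { 0.9, 0.2, 0.7 };

double sse = 0;
for (int i = 0; i < ideal.Length; i++)
{
    double diff = ideal[i] - actual[i];
    sse += diff * diff;            // sum of squared errors
}

double mse = sse / ideal.Length;   // mean squared error
double rms = Math.Sqrt(mse);       // root mean square error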
/// <summary>
/// Calculate the error for this neural network. The error is calculated
/// using root-mean-square (RMS).
/// </summary>
/// <param name="data">The training set.</param>
/// <returns>The calculated error.</returns>
public double CalculateError(INeuralDataSet data)
{
    ClearContext();
    ErrorCalculation errorCalculation = new ErrorCalculation();

    foreach (INeuralDataPair pair in data)
    {
        INeuralData actual = Compute(pair.Input);
        errorCalculation.UpdateError(actual.Data, pair.Ideal.Data);
    }

    return errorCalculation.Calculate();
}
/// <summary>
/// Calculate the SSE error.
/// </summary>
/// <returns>The SSE error with the current weights.</returns>
private double CalculateError()
{
    var result = new ErrorCalculation();

    for (int i = 0; i < _trainingLength; i++)
    {
        _indexableTraining.GetRecord(i, _pair);
        IMLData actual = _network.Compute(_pair.Input);
        result.UpdateError(actual.Data, _pair.Ideal.Data, _pair.Significance);
    }

    return result.CalculateSSE();
}
/// <summary>
/// Calculate the SSE error.
/// </summary>
/// <returns>The SSE error with the current weights.</returns>
private double CalculateError()
{
    var result = new ErrorCalculation();

    for (int i = 0; i < _trainingLength; i++)
    {
        var pair = _indexableTraining[i];
        var actual = _network.Compute(pair.Input);
        result.UpdateError(actual, pair.Ideal, pair.Significance);
    }

    return result.CalculateSSE();
}
public GradientWorker(FlatNetwork theNetwork, TrainFlatNetworkProp theOwner,
                      IMLDataSet theTraining, int theLow, int theHigh,
                      double[] theFlatSpots, IErrorFunction ef)
{
    // Decompiled (obfuscated) variant of the GradientWorker constructor above,
    // with the goto-based control flow straightened out; behavior unchanged.
    this._x84e81691256999b2 = new ErrorCalculation();
    this._x87a7fc6a72741c2e = theNetwork;
    this._x823a2b9c8bf459c5 = theTraining;
    this._xd12d1dba8a023d95 = theLow;
    this._x628ea9b89457a2a9 = theHigh;
    this._x071bde1041617fce = theOwner;
    this._x0ba854627e1326f9 = theFlatSpots;

    this._x58c3d5da5c5c72db = new double[this._x87a7fc6a72741c2e.LayerOutput.Length];
    this._xe05127febf8b7904 = new double[this._x87a7fc6a72741c2e.Weights.Length];
    this._xd505507cf33ae543 = new double[this._x87a7fc6a72741c2e.OutputCount];

    this._x2f33d779e5a20b28 = this._x87a7fc6a72741c2e.Weights;
    this._xb25095f37f20a1c1 = this._x87a7fc6a72741c2e.LayerIndex;
    this._xe05f7b8f952f0ba4 = this._x87a7fc6a72741c2e.LayerCounts;
    this._x7d5bf19d36074a85 = this._x87a7fc6a72741c2e.WeightIndex;
    this._x5e72e5e601f79c78 = this._x87a7fc6a72741c2e.LayerOutput;
    this._x59e01312f2f4aa96 = this._x87a7fc6a72741c2e.LayerSums;
    this._xc99b49dd213196ca = this._x87a7fc6a72741c2e.LayerFeedCounts;
    this._x2cb049236d33bbda = ef;

    this._x61830ac74d65acc3 = BasicMLDataPair.CreatePair(this._x87a7fc6a72741c2e.InputCount,
                                                         this._x87a7fc6a72741c2e.OutputCount);
}
public static double CalculateRegressionError(IMLRegression method, IMLDataSet data)
{
    ErrorCalculation calculation = new ErrorCalculation();

    if (method is IMLContext)
    {
        ((IMLContext) method).ClearContext();
    }

    foreach (IMLDataPair pair in data)
    {
        IMLData data2 = method.Compute(pair.Input);
        calculation.UpdateError(data2.Data, pair.Ideal.Data, pair.Significance);
    }

    return calculation.Calculate();
}
/// <summary>
/// Calculate a regression error.
/// </summary>
/// <param name="method">The method to check.</param>
/// <param name="data">The data to check.</param>
/// <returns>The error.</returns>
public static double CalculateRegressionError(IMLRegression method, IMLDataSet data)
{
    var errorCalculation = new ErrorCalculation();

    if (method is IMLContext)
        ((IMLContext) method).ClearContext();

    foreach (IMLDataPair pair in data)
    {
        IMLData actual = method.Compute(pair.Input);
        errorCalculation.UpdateError(actual, pair.Ideal, pair.Significance);
    }

    return errorCalculation.Calculate();
}
public static void Compare(string aFolder, DTWConfiguration aDTWConfig, ref StreamWriter aSWriter)
{
    List<Signature> lSignatures = SignatureFileUtils.GetAllSignaturesFromFolder(aFolder);
    List<Signature> lTemplate = lSignatures.Take(5).ToList();
    List<Signature> lOriginalSignatures = lSignatures.Skip(5).Take(13).ToList();
    List<Signature> lImpostorSignatures = lSignatures.Skip(18).Take(9).ToList();

    for (int i = 0; i < lTemplate.Count; ++i)
    {
        var lElement = lTemplate.ElementAt(i);
        var lNewElement = SignatureUtils.SignatureUtils.CalculateCharacteristics(lElement);
        lNewElement = SignatureUtils.SignatureUtils.StandardizeSignature(lNewElement);
        lTemplate.RemoveAt(i);
        lTemplate.Insert(i, lNewElement);
    }

    for (int i = 0; i < lOriginalSignatures.Count; ++i)
    {
        var lElement = lOriginalSignatures.ElementAt(i);
        var lNewElement = SignatureUtils.SignatureUtils.CalculateCharacteristics(lElement);
        lNewElement = SignatureUtils.SignatureUtils.StandardizeSignature(lNewElement);
        lOriginalSignatures.RemoveAt(i);
        lOriginalSignatures.Insert(i, lNewElement);
    }

    for (int i = 0; i < lImpostorSignatures.Count; ++i)
    {
        var lElement = lImpostorSignatures.ElementAt(i);
        var lNewElement = SignatureUtils.SignatureUtils.CalculateCharacteristics(lElement);
        lNewElement = SignatureUtils.SignatureUtils.StandardizeSignature(lNewElement);
        lImpostorSignatures.RemoveAt(i);
        lImpostorSignatures.Insert(i, lNewElement);
    }

    List<double> lOriginalScores = SignatureUtils.SignatureUtils.CompareSignaturesDTW(lTemplate, lOriginalSignatures, aDTWConfig);
    List<double> lImpostorScores = SignatureUtils.SignatureUtils.CompareSignaturesDTW(lTemplate, lImpostorSignatures, aDTWConfig);

    ErrorCalculation lError = ErrorCalculationFactory.GetDScoreErrorCalculator();
    lError.CalculateErrors(lOriginalScores, lImpostorScores, 100);

    aSWriter.WriteLine(lError.GetERR());
}
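GetERR in the two signature-comparison snippets presumably reports the equal error rate, i.e. the operating point where the false-accept rate (FAR) and false-reject rate (FRR) curves cross. Given parallel FAR/FRR lists such as the ones the commented-out block in the MCYT variant iterates, a hypothetical way to read that value off them (illustrative helper, not the ErrorCalculation implementation):

// Hypothetical EER lookup from parallel FAR/FRR lists (one entry per threshold).
// The lists are assumed to be non-empty and aligned; this is not the library code.
static double EqualErrorRate(IList<double> far, IList<double> frr)
{
    int best = 0;
    for (int i = 1; i < far.Count; i++)
    {
        if (Math.Abs(far[i] - frr[i]) < Math.Abs(far[best] - frr[best]))
        {
            best = i;
        }
    }
    // Report the midpoint of the two rates at the closest crossing.
    return (far[best] + frr[best]) / 2.0;
}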
/// <summary>
/// Calculate the error for this neural network. The error is calculated
/// using root-mean-square (RMS).
/// </summary>
///
/// <param name="data">The training set.</param>
/// <returns>The calculated error.</returns>
public double CalculateError(IMLDataSet data)
{
    var errorCalculation = new ErrorCalculation();
    var actual = new double[_outputCount];
    IMLDataPair pair;

    for (int i = 0; i < data.Count; i++)
    {
        pair = data[i];
        Compute(pair.Input, actual);
        errorCalculation.UpdateError(actual, pair.Ideal, pair.Significance);
    }

    return errorCalculation.Calculate();
}
/// <summary>
/// Calculate the error for this neural network. The error is calculated
/// using root-mean-square (RMS).
/// </summary>
///
/// <param name="data">The training set.</param>
/// <returns>The calculated error.</returns>
public double CalculateError(IEngineIndexableSet data)
{
    ErrorCalculation errorCalculation = new ErrorCalculation();
    double[] actual = new double[this.outputCount];
    IEngineData pair = BasicEngineData.CreatePair(data.InputSize, data.IdealSize);

    for (int i = 0; i < data.Count; i++)
    {
        data.GetRecord(i, pair);
        Compute(pair.InputArray, actual);
        errorCalculation.UpdateError(actual, pair.IdealArray);
    }

    return errorCalculation.Calculate();
}
/// <summary>
/// Calculate the error for this neural network. The error is calculated
/// using root-mean-square (RMS).
/// </summary>
///
/// <param name="data">The training set.</param>
/// <returns>The calculated error.</returns>
public double CalculateError(IMLDataSet data)
{
    var errorCalculation = new ErrorCalculation();
    var actual = new double[_outputCount];
    IMLDataPair pair = BasicMLDataPair.CreatePair(data.InputSize, data.IdealSize);

    for (int i = 0; i < data.Count; i++)
    {
        data.GetRecord(i, pair);
        Compute(pair.InputArray, actual);
        errorCalculation.UpdateError(actual, pair.IdealArray, pair.Significance);
    }

    return errorCalculation.Calculate();
}
/// <summary>
/// Process training batches.
/// </summary>
protected void ProcessBatches()
{
    int lastLearn = 0;
    var errorCalc = new ErrorCalculation();
    _visited.Clear();

    foreach (IMLDataPair pair in _training)
    {
        var input = pair.Input;
        var ideal = pair.Ideal;
        var actual = _network.Compute(input);
        var sig = pair.Significance;

        errorCalc.UpdateError(actual, ideal, sig);

        for (int i = 0; i < _network.OutputCount; i++)
        {
            double diff = (ideal[i] - actual[i]) * sig;
            IFreeformNeuron neuron = _network.OutputLayer.Neurons[i];
            CalculateOutputDelta(neuron, diff);
            CalculateNeuronGradient(neuron);
        }

        // Are we at the end of a batch?
        lastLearn++;
        if (lastLearn >= BatchSize)
        {
            lastLearn = 0;
            Learn();
        }
    }

    // Handle any remaining data.
    if (lastLearn > 0)
    {
        Learn();
    }

    // Set the overall error.
    Error = errorCalc.Calculate();
}
public override sealed void Iteration()
{
    if (this._mustInit)
        this.InitWeight();

    ErrorCalculation errorCalculation = new ErrorCalculation();

    foreach (IMLDataPair mlDataPair in this._training)
    {
        IMLData instar = this._network.ComputeInstar(mlDataPair.Input);
        int row = EngineArray.IndexOfLargest(instar);

        for (int col = 0; col < this._network.OutstarCount; ++col)
        {
            double value_ren = this._learningRate
                               * (mlDataPair.Ideal[col] - this._network.WeightsInstarToOutstar[row, col]);
            this._network.WeightsInstarToOutstar.Add(row, col, value_ren);
        }

        IMLData outstar = this._network.ComputeOutstar(instar);
        errorCalculation.UpdateError(outstar, mlDataPair.Ideal, mlDataPair.Significance);
    }

    this.Error = errorCalculation.Calculate();
}
private void display()
{
    double[] input = new double[SineWave.INPUT_SIZE];
    double[] output = new double[SineWave.OUTPUT_SIZE];

    for (int i = SineWave.INPUT_SIZE; i < SineWave.ACTUAL_SIZE; i++)
    {
        this.actual.getInputData(i - SineWave.INPUT_SIZE, input);
        this.actual.getOutputData(i - SineWave.INPUT_SIZE, output);

        StringBuilder str = new StringBuilder();
        str.Append(i);
        str.Append(":Actual=");

        for (int j = 0; j < output.Length; j++)
        {
            if (j > 0)
            {
                str.Append(',');
            }
            str.Append(output[j]);
        }

        double[] predict = this.network.ComputeOutputs(input);

        str.Append(":Predicted=");
        for (int j = 0; j < output.Length; j++)
        {
            if (j > 0)
            {
                str.Append(',');
            }
            str.Append(predict[j]);
        }

        str.Append(":Difference=");

        ErrorCalculation error = new ErrorCalculation();
        error.UpdateError(predict, output);
        str.Append(error.CalculateRMS().ToString("N2"));

        Console.WriteLine(str.ToString());
    }
}
public StochasticGradientDescent(IContainsFlat network,
                                 IMLDataSet training,
                                 IGenerateRandom theRandom) :
    base(TrainingImplementationType.Iterative)
{
    Training = training;
    UpdateRule = new AdamUpdate();

    if (!(training is BatchDataSet))
    {
        BatchSize = 25;
    }

    _method = network;
    _flat = network.Flat;
    _layerDelta = new double[_flat.LayerOutput.Length];
    _gradients = new double[_flat.Weights.Length];
    _errorCalculation = new ErrorCalculation();
    _rnd = theRandom;
    LearningRate = 0.001;
    Momentum = 0.9;
}
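The trainer above defaults to an Adam update rule with a learning rate of 0.001. For reference, a generic Adam step over a gradient vector looks roughly like the sketch below; this is the textbook formulation, shown only to illustrate what an update rule such as AdamUpdate applies each iteration, and all names are illustrative (Encog's internal sign and storage conventions may differ):

// Generic Adam weight update (textbook form, minimizing the error).
// m and v are per-weight running moment estimates; t is the 1-based step count.
static void AdamStep(double[] weights, double[] gradients,
                     double[] m, double[] v, int t,
                     double lr = 0.001, double beta1 = 0.9,
                     double beta2 = 0.999, double eps = 1e-8)
{
    for (int i = 0; i < weights.Length; i++)
    {
        m[i] = beta1 * m[i] + (1 - beta1) * gradients[i];
        v[i] = beta2 * v[i] + (1 - beta2) * gradients[i] * gradients[i];

        double mHat = m[i] / (1 - Math.Pow(beta1, t));   // bias-corrected first moment
        double vHat = v[i] / (1 - Math.Pow(beta2, t));   // bias-corrected second moment

        weights[i] -= lr * mHat / (Math.Sqrt(vHat) + eps);
    }
}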
/// <inheritdoc/>
public override sealed void Iteration()
{
    var errorCalculation = new ErrorCalculation();

    foreach (IMLDataPair pair in _training)
    {
        // calculate the error
        IMLData output = _network.Compute(pair.Input);

        for (int currentAdaline = 0; currentAdaline < output.Count; currentAdaline++)
        {
            double diff = pair.Ideal[currentAdaline] - output[currentAdaline];

            // weights
            for (int i = 0; i <= _network.InputCount; i++)
            {
                double input;

                if (i == _network.InputCount)
                {
                    input = 1.0d;
                }
                else
                {
                    input = pair.Input[i];
                }

                _network.AddWeight(0, i, currentAdaline, _learningRate * diff * input);
            }
        }

        errorCalculation.UpdateError(output.Data, pair.Ideal.Data, pair.Significance);
    }

    // set the global error
    Error = errorCalculation.Calculate();
}
public void display()
{
    double[] present = new double[INPUT_SIZE * 2];
    double[] actualOutput = new double[OUTPUT_SIZE];
    int index = 0;

    foreach (FinancialSample sample in actual.getSamples())
    {
        if (sample.getDate().CompareTo(PREDICT_FROM) > 0)
        {
            StringBuilder str = new StringBuilder();
            str.Append(sample.getDate());
            str.Append(":Start=");
            str.Append(sample.getAmount());

            actual.getInputData(index - INPUT_SIZE, present);
            actual.getOutputData(index - INPUT_SIZE, actualOutput);

            IMLData data = new BasicMLData(present);
            IMLData Output = network.Compute(data);
            double[] predict = Output.Data;

            str.Append(",Actual % Change=");
            str.Append(actualOutput[0].ToString("N2"));
            str.Append(",Predicted % Change= ");
            str.Append(predict[0].ToString("N2"));
            str.Append(":Difference=");

            ErrorCalculation error = new ErrorCalculation();
            error.UpdateError(Output.Data, actualOutput, 1);
            str.Append(error.CalculateRMS().ToString("N2"));

            // Console.WriteLine(str.ToString());
        }
        index++;
    }
}
/// <summary>
/// Perform a training iteration.
/// </summary>
public override void Iteration()
{
    ErrorCalculation errorCalculation = new ErrorCalculation();

    ILayer inputLayer = network.GetLayer(BasicNetwork.TAG_INPUT);
    ILayer outputLayer = network.GetLayer(BasicNetwork.TAG_OUTPUT);

    foreach (INeuralDataPair pair in this.training)
    {
        // calculate the error
        INeuralData output = this.network.Compute(pair.Input);

        for (int currentAdaline = 0; currentAdaline < output.Count; currentAdaline++)
        {
            double diff = pair.Ideal[currentAdaline] - output[currentAdaline];

            // weights
            for (int i = 0; i < inputLayer.NeuronCount; i++)
            {
                double input = pair.Input[i];
                synapse.WeightMatrix.Add(i, currentAdaline, learningRate * diff * input);
            }

            // bias
            double t = outputLayer.BiasWeights[currentAdaline];
            t += learningRate * diff;
            outputLayer.BiasWeights[currentAdaline] = t;
        }

        errorCalculation.UpdateError(output.Data, pair.Ideal.Data);
    }

    // set the global error
    this.Error = errorCalculation.Calculate();
}
public void validate_bidirectional_analog_ATotal()
{
    var analyticSolutionRight =
        BidirectionalAnalyticSolutions.GetBidirectionalRadianceIntegratedOverInterval(
            _slabThickness,
            new OpticalProperties(_mua, _musp, _g, 1.0),
            1,
            0,
            _slabThickness);
    var analyticSolutionLeft =
        BidirectionalAnalyticSolutions.GetBidirectionalRadianceIntegratedOverInterval(
            _slabThickness,
            new OpticalProperties(_mua, _musp, _g, 1.0),
            -1,
            0,
            _slabThickness);

    // take sum because absorbed energy independent of direction
    var analyticSolution = analyticSolutionRight + analyticSolutionLeft;

    var sd = ErrorCalculation.StandardDeviation(_output.Input.N, _output.Atot, _output.Atot2);
    Assert.Less(Math.Abs(_output.Atot - _mua * analyticSolution), 3 * sd);
}
/// <summary>
/// Construct a gradient worker.
/// </summary>
///
/// <param name="theNetwork">The network to train.</param>
/// <param name="theOwner">The owner that is doing the training.</param>
/// <param name="theTraining">The training data.</param>
/// <param name="theLow">The low index to use in the training data.</param>
/// <param name="theHigh">The high index to use in the training data.</param>
/// <param name="theFlatSpots">Holds an array of flat spot constants.</param>
/// <param name="ef">The error function to use.</param>
public GradientWorker(FlatNetwork theNetwork, TrainFlatNetworkProp theOwner,
                      IMLDataSet theTraining, int theLow, int theHigh,
                      double[] theFlatSpots, IErrorFunction ef)
{
    _errorCalculation = new ErrorCalculation();
    _network = theNetwork;
    _training = theTraining;
    _low = theLow;
    _high = theHigh;
    _owner = theOwner;
    _flatSpot = theFlatSpots;

    _layerDelta = new double[_network.LayerOutput.Length];
    _gradients = new double[_network.Weights.Length];
    _actual = new double[_network.OutputCount];

    _weights = _network.Weights;
    _layerIndex = _network.LayerIndex;
    _layerCounts = _network.LayerCounts;
    _weightIndex = _network.WeightIndex;
    _layerOutput = _network.LayerOutput;
    _layerSums = _network.LayerSums;
    _layerFeedCounts = _network.LayerFeedCounts;

    _ef = ef;
    _pair = BasicMLDataPair.CreatePair(_network.InputCount, _network.OutputCount);
}
public double CalculateError(IMLDataSet data)
{
    // Decompiled (obfuscated) variant of the CalculateError method above,
    // with the goto-based control flow straightened out; behavior unchanged.
    ErrorCalculation calculation = new ErrorCalculation();
    double[] numArray = new double[this._outputCount];
    IMLDataPair pair = BasicMLDataPair.CreatePair(data.InputSize, data.IdealSize);

    for (int num = 0; num < data.Count; num++)
    {
        data.GetRecord((long) num, pair);
        this.Compute(pair.InputArray, numArray);
        calculation.UpdateError(numArray, pair.IdealArray, pair.Significance);
    }

    return calculation.Calculate();
}
/// <summary>
/// Called internally to compute each output neuron.
/// </summary>
/// <param name="outputNeuron">The output neuron to compute.</param>
private void InternalCompute(int outputNeuron)
{
    int row = 0;
    var error = new ErrorCalculation();

    EngineArray.Fill(derivative, 0);

    // Loop over every training element
    foreach (var pair in training)
    {
        var networkOutput = network.Compute(pair.Input);
        double e = pair.Ideal.Data[outputNeuron] - networkOutput[outputNeuron];
        error.UpdateError(networkOutput[outputNeuron], pair.Ideal[outputNeuron]);

        int currentWeight = 0;

        // loop over the output weights
        int outputFeedCount = network.GetLayerTotalNeuronCount(network.LayerCount - 2);
        for (int i = 0; i < network.OutputCount; i++)
        {
            for (int j = 0; j < outputFeedCount; j++)
            {
                double jc;

                if (i == outputNeuron)
                {
                    jc = ComputeDerivative(pair.Input, outputNeuron,
                                           currentWeight, _dStep,
                                           networkOutput[outputNeuron], row);
                }
                else
                {
                    jc = 0;
                }

                gradients[currentWeight] += jc * e;
                derivative[currentWeight] += jc;
                currentWeight++;
            }
        }

        // Loop over every weight in the neural network
        while (currentWeight < network.Flat.Weights.Length)
        {
            double jc = ComputeDerivative(pair.Input, outputNeuron,
                                          currentWeight, _dStep,
                                          networkOutput[outputNeuron], row);
            derivative[currentWeight] += jc;
            gradients[currentWeight] += jc * e;
            currentWeight++;
        }

        row++;
    }

    UpdateHessian(derivative);
    sse += error.CalculateSSE();
}
/// <inheritdoc />
public override sealed void Iteration()
{
    if (_mustInit)
    {
        InitWeight();
    }

    var error = new ErrorCalculation();

    foreach (IMLDataPair pair in _training)
    {
        IMLData xout = _network.ComputeInstar(pair.Input);
        int j = EngineArray.IndexOfLargest(xout.Data);

        for (int i = 0; i < _network.OutstarCount; i++)
        {
            double delta = _learningRate
                           * (pair.Ideal[i] - _network.WeightsInstarToOutstar[j, i]);
            _network.WeightsInstarToOutstar.Add(j, i, delta);
        }

        IMLData out2 = _network.ComputeOutstar(xout);
        error.UpdateError(out2.Data, pair.Ideal.Data, pair.Significance);
    }

    Error = error.Calculate();
}
/// <summary>
/// Calculate the error for this neural network. The error is calculated
/// using root-mean-square (RMS).
/// </summary>
///
/// <param name="data">The training set.</param>
/// <returns>The calculated error.</returns>
public double CalculateError(IMLDataSet data)
{
    var errorCalculation = new ErrorCalculation();
    var actual = new double[_outputCount];
    IMLDataPair pair = BasicMLDataPair.CreatePair(data.InputSize, data.IdealSize);

    for (int i = 0; i < data.Count; i++)
    {
        data.GetRecord(i, pair);
        Compute(pair.InputArray, actual);
        errorCalculation.UpdateError(actual, pair.IdealArray, pair.Significance);
    }

    return errorCalculation.Calculate();
}
public void display()
{
    double[] present = new double[INPUT_SIZE * 2];
    double[] actualOutput = new double[OUTPUT_SIZE];
    int index = 0;

    foreach (FinancialSample sample in actual.getSamples())
    {
        if (sample.getDate().CompareTo(PREDICT_FROM) > 0)
        {
            StringBuilder str = new StringBuilder();
            str.Append(sample.getDate());
            str.Append(":Start=");
            str.Append(sample.getAmount());

            actual.getInputData(index - INPUT_SIZE, present);
            actual.getOutputData(index - INPUT_SIZE, actualOutput);

            IMLData data = new BasicMLData(present);
            IMLData Output = network.Compute(data);
            double[] predict = Output.Data;

            str.Append(",Actual % Change=");
            str.Append(actualOutput[0].ToString("N2"));
            str.Append(",Predicted % Change= ");
            str.Append(predict[0].ToString("N2"));
            str.Append(":Difference=");

            ErrorCalculation error = new ErrorCalculation();
            error.UpdateError(Output.Data, actualOutput, 1);
            str.Append(error.CalculateRMS().ToString("N2"));

            // Console.WriteLine(str.ToString());
        }
        index++;
    }
}
/// <summary>
/// Calculate the error for this neural network. The error is calculated
/// using root-mean-square (RMS).
/// </summary>
///
/// <param name="data">The training set.</param>
/// <returns>The calculated error.</returns>
public double CalculateError(IMLDataSet data)
{
    var errorCalculation = new ErrorCalculation();
    var actual = new double[_outputCount];

    for (int i = 0; i < data.Count; i++)
    {
        IMLDataPair pair = data[i];
        Compute(pair.Input, actual);
        errorCalculation.UpdateError(actual, pair.Ideal, pair.Significance);
    }

    return errorCalculation.Calculate();
}
/// <summary>
/// Calculate the error for this neural network. The error is calculated
/// using root-mean-square (RMS).
/// </summary>
/// <param name="data">The training set.</param>
/// <returns>The calculated error.</returns>
public double CalculateError(INeuralDataSet data)
{
    ClearContext();
    ErrorCalculation errorCalculation = new ErrorCalculation();

    foreach (INeuralDataPair pair in data)
    {
        INeuralData actual = Compute(pair.Input);
        errorCalculation.UpdateError(actual.Data, pair.Ideal.Data);
    }

    return errorCalculation.Calculate();
}
/// <summary>
/// Evaluate the error for the specified model.
/// </summary>
///
/// <param name="param">The parameters for the SVM.</param>
/// <param name="prob">The problem to evaluate.</param>
/// <param name="target">The output values from the SVM.</param>
/// <returns>The calculated error.</returns>
private static double Evaluate(svm_parameter param, svm_problem prob,
                               double[] target)
{
    int totalCorrect = 0;
    var error = new ErrorCalculation();

    if ((param.svm_type == svm_parameter.EPSILON_SVR)
        || (param.svm_type == svm_parameter.NU_SVR))
    {
        for (int i = 0; i < prob.l; i++)
        {
            double ideal = prob.y[i];
            double actual = target[i];
            error.UpdateError(actual, ideal);
        }
        return error.Calculate();
    }

    for (int i = 0; i < prob.l; i++)
    {
        if (target[i] == prob.y[i])
        {
            ++totalCorrect;
        }
    }

    return Format.HundredPercent * totalCorrect / prob.l;
}