Calculate() Public Method

Returns the root mean square error for a complete training set.
public Calculate ( ) : double
Returns double
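A minimal usage sketch follows, assuming only the ErrorCalculation API visible in the examples below (UpdateError and Calculate); the sample arrays and variable names are illustrative placeholders, not part of the library.

 // Hedged sketch: accumulate one actual/ideal pair, then read the overall error.
 // The arrays below are made-up sample values for illustration only.
 var calc = new ErrorCalculation();
 double[] actual = { 0.1, 0.9 };
 double[] ideal = { 0.0, 1.0 };
 calc.UpdateError(actual, ideal, 1.0); // significance weight of 1.0
 double error = calc.Calculate();      // root mean square error, per the description above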
Example #1
 public static double CalculateError(IMLRegression method, IMLDataSet data)
 {
     ErrorCalculation calculation = new ErrorCalculation();
     if (method is IMLContext)
     {
         ((IMLContext) method).ClearContext();
     }
     foreach (IMLDataPair pair in data)
     {
         IMLData data2 = method.Compute(pair.Input);
         calculation.UpdateError(data2.Data, pair.Ideal.Data, pair.Significance);
     }
     return calculation.Calculate();
 }
Example #2
        /// <summary>
        /// Calculate an error for a method that uses regression.
        /// </summary>
        /// <param name="method">The method to evaluate.</param>
        /// <param name="data">The training data to evaluate with.</param>
        /// <returns>The error.</returns>
        public static double CalculateError(IMLRegression method,
                                            IMLDataSet data)
        {
            var errorCalculation = new ErrorCalculation();

            // clear context
            if (method is IMLContext)
            {
                ((IMLContext) method).ClearContext();
            }

            // calculate error
            foreach (IMLDataPair pair in data)
            {
                IMLData actual = method.Compute(pair.Input);
                errorCalculation.UpdateError(actual, pair.Ideal, pair.Significance);
            }
            return errorCalculation.Calculate();
        }
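For context, a minimal call-site sketch follows; network and trainingSet are hypothetical placeholders (any IMLRegression and IMLDataSet), and the call assumes it is made from, or qualified with, the class that declares the helper.

 // Hypothetical usage of the helper above; 'network' and 'trainingSet' are placeholders.
 double rms = CalculateError(network, trainingSet);
 Console.WriteLine("Training-set RMS error: " + rms);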
Example #3
        /// <inheritdoc/>
        public override sealed void Iteration()
        {
            var errorCalculation = new ErrorCalculation();

            foreach (IMLDataPair pair in _training)
            {
                // calculate the error
                IMLData output = _network.Compute(pair.Input);

                for (int currentAdaline = 0; currentAdaline < output.Count; currentAdaline++)
                {
                    double diff = pair.Ideal[currentAdaline]
                                  - output[currentAdaline];

                    // weights
                    for (int i = 0; i <= _network.InputCount; i++)
                    {
                        double input;

                        if (i == _network.InputCount)
                        {
                            input = 1.0d;
                        }
                        else
                        {
                            input = pair.Input[i];
                        }

                        _network.AddWeight(0, i, currentAdaline,
                                          _learningRate*diff*input);
                    }
                }

                errorCalculation.UpdateError(output, pair.Ideal, pair.Significance);
            }

            // set the global error
            Error = errorCalculation.Calculate();
        }
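Each Iteration() implementation shown here ends by publishing ErrorCalculation.Calculate() through the trainer's Error property, so a caller can drive training with a loop like the sketch below (the train variable and the 0.01 stopping threshold are illustrative assumptions, not part of the examples).

 // Sketch of a typical driver loop; 'train' stands for any trainer shown above.
 int epoch = 1;
 do
 {
     train.Iteration();   // one pass over the training data
     Console.WriteLine("Epoch " + epoch + " error: " + train.Error);
     epoch++;
 } while (train.Error > 0.01); // 0.01 is an arbitrary illustrative target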
Example #4
File: FlatNetwork.cs Project: neismit/emds
 public double CalculateError(IMLDataSet data)
 {
     ErrorCalculation calculation = new ErrorCalculation();
     double[] numArray = new double[this._outputCount];
     IMLDataPair pair = BasicMLDataPair.CreatePair(data.InputSize, data.IdealSize);
     for (int num = 0; num < data.Count; num++)
     {
         data.GetRecord((long) num, pair);
         this.Compute(pair.InputArray, numArray);
         calculation.UpdateError(numArray, pair.IdealArray, pair.Significance);
     }
     return calculation.Calculate();
 }
Example #5
        /// <inheritdoc />
        public override sealed void Iteration()
        {
            if (_mustInit)
            {
                InitWeight();
            }

            var error = new ErrorCalculation();

            foreach (IMLDataPair pair in _training)
            {
                IMLData xout = _network.ComputeInstar(pair.Input);

                int j = EngineArray.IndexOfLargest(xout.Data);
                for (int i = 0; i < _network.OutstarCount; i++)
                {
                    double delta = _learningRate
                                   *(pair.Ideal[i] - _network.WeightsInstarToOutstar[j, i]);
                    _network.WeightsInstarToOutstar.Add(j, i, delta);
                }

                IMLData out2 = _network.ComputeOutstar(xout);
                error.UpdateError(out2.Data, pair.Ideal.Data, pair.Significance);
            }

            Error = error.Calculate();
        }
Example #6
 public override sealed void Iteration()
 {
     if (this._x268cb8b20222b0dc)
     {
         this.xabfa4e7d76a2422c();
     }
     ErrorCalculation calculation = new ErrorCalculation();
     foreach (IMLDataPair pair in this._x823a2b9c8bf459c5)
     {
         IMLData data = this._x87a7fc6a72741c2e.ComputeInstar(pair.Input);
         int num = EngineArray.IndexOfLargest(data.Data);
         for (int num2 = 0; num2 < this._x87a7fc6a72741c2e.OutstarCount; num2++)
         {
             double num3 = this._x9b481c22b6706459 * (pair.Ideal[num2] - this._x87a7fc6a72741c2e.WeightsInstarToOutstar[num, num2]);
             this._x87a7fc6a72741c2e.WeightsInstarToOutstar.Add(num, num2, num3);
         }
         IMLData data2 = this._x87a7fc6a72741c2e.ComputeOutstar(data);
         calculation.UpdateError(data2.Data, pair.Ideal.Data, pair.Significance);
     }
     this.Error = calculation.Calculate();
 }
Example #7
        /// <summary>
        /// Evaluate the error for the specified model.
        /// </summary>
        ///
        /// <param name="param">The params for the SVN.</param>
        /// <param name="prob">The problem to evaluate.</param>
        /// <param name="target">The output values from the SVN.</param>
        /// <returns>The calculated error.</returns>
        private static double Evaluate(svm_parameter param, svm_problem prob,
                                double[] target)
        {
            int totalCorrect = 0;

            var error = new ErrorCalculation();

            if ((param.svm_type == svm_parameter.EPSILON_SVR)
                || (param.svm_type == svm_parameter.NU_SVR))
            {
                for (int i = 0; i < prob.l; i++)
                {
                    double ideal = prob.y[i];
                    double actual = target[i];
                    error.UpdateError(actual, ideal);
                }
                return error.Calculate();
            }
            for (int i = 0; i < prob.l; i++)
            {
                if (target[i] == prob.y[i])
                {
                    ++totalCorrect;
                }
            }

            return Format.HundredPercent*totalCorrect/prob.l;
        }
Example #8
        /// <summary>
        /// Calculate the error for this neural network. The error is calculated
        /// using root-mean-square (RMS).
        /// </summary>
        ///
        /// <param name="data">The training set.</param>
        /// <returns>The error percentage.</returns>
        public double CalculateError(IMLDataSet data)
        {
            var errorCalculation = new ErrorCalculation();

            var actual = new double[_outputCount];

            for (int i = 0; i < data.Count; i++)
            {
                IMLDataPair pair = data[i];
                Compute(pair.Input, actual);
                errorCalculation.UpdateError(actual, pair.Ideal, pair.Significance);
            }
            return errorCalculation.Calculate();
        }
Example #9
        /// <summary>
        /// Calculate the error for this neural network. The error is calculated
        /// using root-mean-square (RMS).
        /// </summary>
        ///
        /// <param name="data">The training set.</param>
        /// <returns>The error percentage.</returns>
        public double CalculateError(IMLDataSet data)
        {
            var errorCalculation = new ErrorCalculation();

            var actual = new double[_outputCount];
            IMLDataPair pair = BasicMLDataPair.CreatePair(data.InputSize,
                                                         data.IdealSize);

            for (int i = 0; i < data.Count; i++)
            {
                data.GetRecord(i, pair);
                Compute(pair.InputArray, actual);
                errorCalculation.UpdateError(actual, pair.IdealArray, pair.Significance);
            }
            return errorCalculation.Calculate();
        }
Example #10
        /// <summary>
        /// Process training for pure batch mode (one single batch).
        /// </summary>
        protected void ProcessPureBatch()
        {
            var errorCalc = new ErrorCalculation();
            _visited.Clear();

            foreach (IMLDataPair pair in _training)
            {
                var input = pair.Input;
                var ideal = pair.Ideal;
                var actual = _network.Compute(input);
                var sig = pair.Significance;

                errorCalc.UpdateError(actual, ideal, sig);

                for (int i = 0; i < _network.OutputCount; i++)
                {
                    var diff = (ideal[i] - actual[i])*sig;
                    IFreeformNeuron neuron = _network.OutputLayer.Neurons[i];
                    CalculateOutputDelta(neuron, diff);
                    CalculateNeuronGradient(neuron);
                }
            }

            // Set the overall error.
            Error = errorCalc.Calculate();

            // Learn for all data.
            Learn();
        }
Example #11
        /// <summary>
        /// Process training batches.
        /// </summary>
        protected void ProcessBatches()
        {
            int lastLearn = 0;
            var errorCalc = new ErrorCalculation();
            _visited.Clear();

            foreach (IMLDataPair pair in _training)
            {
                var input = pair.Input;
                var ideal = pair.Ideal;
                var actual = _network.Compute(input);
                var sig = pair.Significance;

                errorCalc.UpdateError(actual, ideal, sig);

                for (int i = 0; i < _network.OutputCount; i++)
                {
                    double diff = (ideal[i] - actual[i])*sig;
                    IFreeformNeuron neuron = _network.OutputLayer.Neurons[i];
                    CalculateOutputDelta(neuron, diff);
                    CalculateNeuronGradient(neuron);
                }

                // Are we at the end of a batch?
                lastLearn++;
                if (lastLearn >= BatchSize)
                {
                    lastLearn = 0;
                    Learn();
                }
            }

            // Handle any remaining data.
            if (lastLearn > 0)
            {
                Learn();
            }

            // Set the overall error.
            Error = errorCalc.Calculate();
        }
Example #12
 public override sealed void Iteration()
 {
     ErrorCalculation calculation = new ErrorCalculation();
     foreach (IMLDataPair pair in this._x823a2b9c8bf459c5)
     {
         IMLData data = this._x87a7fc6a72741c2e.Compute(pair.Input);
         for (int num = 0; num < data.Count; num++)
         {
             double num2 = pair.Ideal[num] - data[num];
             for (int num3 = 0; num3 <= this._x87a7fc6a72741c2e.InputCount; num3++)
             {
                 double num4 = (num3 == this._x87a7fc6a72741c2e.InputCount) ? 1.0 : pair.Input[num3];
                 this._x87a7fc6a72741c2e.AddWeight(0, num3, num, (this._x9b481c22b6706459 * num2) * num4);
             }
         }
         calculation.UpdateError(data.Data, pair.Ideal.Data, pair.Significance);
     }
     this.Error = calculation.Calculate();
 }
Example #13
 public override sealed void Iteration()
 {
     if (this._mustInit)
         this.InitWeight();
     ErrorCalculation errorCalculation = new ErrorCalculation();
     foreach (IMLDataPair mlDataPair in this._training)
     {
         IMLData instar = this._network.ComputeInstar(mlDataPair.Input);
         int row = EngineArray.IndexOfLargest(instar);
         for (int col = 0; col < this._network.OutstarCount; ++col)
         {
             double value_ren = this._learningRate * (mlDataPair.Ideal[col] - this._network.WeightsInstarToOutstar[row, col]);
             this._network.WeightsInstarToOutstar.Add(row, col, value_ren);
         }
         IMLData outstar = this._network.ComputeOutstar(instar);
         errorCalculation.UpdateError(outstar, mlDataPair.Ideal, mlDataPair.Significance);
     }
     this.Error = errorCalculation.Calculate();
 }