/// <inheritdoc />
        public override sealed void Iteration()
        {
            if (_mustInit)
            {
                InitWeight();
            }

            var error = new ErrorCalculation();

            foreach (IMLDataPair pair in _training)
            {
                IMLData xout = _network.ComputeInstar(pair.Input);

                int j = EngineArray.IndexOfLargest(xout);
                for (int i = 0; i < _network.OutstarCount; i++)
                {
                    double delta = _learningRate
                                   * (pair.Ideal[i] - _network.WeightsInstarToOutstar[j, i]);
                    _network.WeightsInstarToOutstar.Add(j, i, delta);
                }

                IMLData out2 = _network.ComputeOutstar(xout);
                error.UpdateError(out2, pair.Ideal, pair.Significance);
            }

            Error = error.Calculate();
        }
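
As a usage sketch: a driver that repeatedly calls an Iteration() like the one above until the reported Error falls below a target. Only the Iteration() call and the Error property come from the snippet; the interface name and the stopping criteria are assumptions, not part of Encog's API.

// A minimal driver sketch, assuming only an Iteration() method and an Error
// property like those shown above.
public interface IIterativeTrainer
{
    void Iteration();
    double Error { get; }
}

public static class TrainingDriver
{
    public static void TrainUntil(IIterativeTrainer train, double targetError, int maxEpochs)
    {
        int epoch = 0;
        do
        {
            train.Iteration();   // one pass over the training set
            epoch++;
        } while (train.Error > targetError && epoch < maxEpochs);
    }
}
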
        /// <summary>
        /// Evaluate the error for the specified model.
        /// </summary>
        ///
        /// <param name="param">The params for the SVN.</param>
        /// <param name="prob">The problem to evaluate.</param>
        /// <param name="target">The output values from the SVN.</param>
        /// <returns>The calculated error.</returns>
        private static double Evaluate(svm_parameter param, svm_problem prob,
                                       double[] target)
        {
            int totalCorrect = 0;

            var error = new ErrorCalculation();

            if ((param.svm_type == svm_parameter.EPSILON_SVR) ||
                (param.svm_type == svm_parameter.NU_SVR))
            {
                for (int i = 0; i < prob.l; i++)
                {
                    double ideal  = prob.y[i];
                    double actual = target[i];
                    error.UpdateError(actual, ideal);
                }
                return error.Calculate();
            }
            for (int i = 0; i < prob.l; i++)
            {
                if (target[i] == prob.y[i])
                {
                    ++totalCorrect;
                }
            }

            return Format.HundredPercent * totalCorrect / prob.l;
        }
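
The two branches above reflect two different error conventions: an averaged regression error for epsilon-SVR/nu-SVR, and percent-correct for classification. Below is a small, self-contained sketch of both on plain arrays; it assumes mean squared error for the regression case, whereas ErrorCalculation's exact metric depends on how it is configured.

// Sketch of the two error conventions switched on above, using plain arrays.
public static class ErrorConventions
{
    // Regression (SVR): accumulate squared differences and average them.
    public static double RegressionError(double[] actual, double[] ideal)
    {
        double sum = 0;
        for (int i = 0; i < actual.Length; i++)
        {
            double diff = ideal[i] - actual[i];
            sum += diff * diff;
        }
        return sum / actual.Length;   // mean squared error
    }

    // Classification: count exact matches and report percent correct.
    public static double ClassificationAccuracy(double[] predicted, double[] labels)
    {
        int correct = 0;
        for (int i = 0; i < predicted.Length; i++)
        {
            if (predicted[i] == labels[i])
            {
                correct++;
            }
        }
        return 100.0 * correct / predicted.Length;   // percent correct
    }
}
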
        /// <summary>
        /// Process training for pure batch mode (one single batch).
        /// </summary>
        protected void ProcessPureBatch()
        {
            var errorCalc = new ErrorCalculation();

            _visited.Clear();

            foreach (IMLDataPair pair in _training)
            {
                var input  = pair.Input;
                var ideal  = pair.Ideal;
                var actual = _network.Compute(input);
                var sig    = pair.Significance;

                errorCalc.UpdateError(actual, ideal, sig);

                for (int i = 0; i < _network.OutputCount; i++)
                {
                    var diff = (ideal[i] - actual[i])
                               * sig;
                    IFreeformNeuron neuron = _network.OutputLayer.Neurons[i];
                    CalculateOutputDelta(neuron, diff);
                    CalculateNeuronGradient(neuron);
                }
            }

            // Set the overall error.
            Error = errorCalc.Calculate();

            // Learn for all data.
            Learn();
        }
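
To make the pure-batch structure concrete, here is a toy sketch on a one-weight linear model: the gradient is accumulated across every pair and a single update is applied at the end of the pass, mirroring the accumulate-then-Learn() flow above. The model, error measure, and learning rate are illustrative, not tied to the freeform network types.

// Sketch of pure batch training on a toy model y = w * x with squared error.
public static class PureBatchSketch
{
    public static double OneBatchIteration(double[] x, double[] ideal, double w, double learningRate)
    {
        double gradient = 0;
        for (int i = 0; i < x.Length; i++)
        {
            double actual = w * x[i];
            double diff = ideal[i] - actual;   // same (ideal - actual) convention as above
            gradient += diff * x[i];           // accumulate over the whole batch
        }
        return w + learningRate * gradient;    // single update at the end of the pass
    }
}
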
        /// <summary>
        /// Evaluate the error for the specified model.
        /// </summary>
        /// <param name="param">The params for the SVN.</param>
        /// <param name="prob">The problem to evaluate.</param>
        /// <param name="target">The output values from the SVN.</param>
        /// <returns>The calculated error.</returns>
        private double Evaluate(svm_parameter param, svm_problem prob,
                                double[] target)
        {
            int total_correct = 0;

            ErrorCalculation error = new ErrorCalculation();

            if (param.svm_type == svm_parameter.EPSILON_SVR ||
                param.svm_type == svm_parameter.NU_SVR)
            {
                for (int i = 0; i < prob.l; i++)
                {
                    double ideal  = prob.y[i];
                    double actual = target[i];
                    error.UpdateError(actual, ideal);
                }
                return error.Calculate();
            }
            else
            {
                for (int i = 0; i < prob.l; i++)
                {
                    if (target[i] == prob.y[i])
                    {
                        ++total_correct;
                    }
                }

                return 100.0 * total_correct / prob.l;
            }
        }
        /// <summary>
        /// Perform one training iteration.
        /// </summary>
        public override void Iteration()
        {
            if (this.mustInit)
            {
                InitWeight();
            }

            ErrorCalculation error = new ErrorCalculation();

            foreach (INeuralDataPair pair in this.training)
            {
                INeuralData output = this.parts.InstarSynapse.Compute(
                    pair.Input);
                int j = this.parts.Winner(output);
                for (int i = 0; i < this.parts.OutstarLayer.NeuronCount; i++)
                {
                    double delta = this.learningRate
                                   * (pair.Ideal[i] - this.parts
                                      .OutstarSynapse.WeightMatrix[j, i]);
                    this.parts.OutstarSynapse.WeightMatrix.Add(j, i, delta);
                }

                error.UpdateError(output.Data, pair.Ideal.Data);
            }

            this.Error = error.Calculate();
        }
        /// <summary>
        /// Calculate the error for this neural network. The error is calculated
        /// using root-mean-square (RMS).
        /// </summary>
        /// <param name="data">The training set.</param>
        /// <returns>The error percentage.</returns>
        public double CalculateError(INeuralDataSet data)
        {
            ClearContext();
            ErrorCalculation errorCalculation = new ErrorCalculation();

            foreach (INeuralDataPair pair in data)
            {
                INeuralData actual = Compute(pair.Input);
                errorCalculation.UpdateError(actual.Data, pair.Ideal.Data);
            }
            return errorCalculation.Calculate();
        }
        /// <summary>
        /// Calculate a regression error.
        /// </summary>
        /// <param name="method">The method to check.</param>
        /// <param name="data">The data to check.</param>
        /// <returns>The error.</returns>
        public static double CalculateRegressionError(IMLRegression method,
                                                      IMLDataSet data)
        {
            var errorCalculation = new ErrorCalculation();
            if (method is IMLContext)
                ((IMLContext)method).ClearContext();

            foreach (IMLDataPair pair in data)
            {
                IMLData actual = method.Compute(pair.Input);
                errorCalculation.UpdateError(actual, pair.Ideal, pair.Significance);
            }
            return errorCalculation.Calculate();
        }
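
A hypothetical call site for the helper above. The BasicMLDataSet(double[][], double[][]) constructor and the namespaces follow Encog 3's C# layout as I recall it, so treat them as assumptions; the trained model and the delegate parameter exist only to keep the sketch compilable without naming the helper's declaring class.

using System;
using Encog.ML;
using Encog.ML.Data;
using Encog.ML.Data.Basic;

public static class RegressionErrorExample
{
    // Hypothetical usage: 'model' is any already-trained IMLRegression, and
    // 'calculateRegressionError' is the static helper above passed as a delegate.
    public static double ValidationError(IMLRegression model,
                                         Func<IMLRegression, IMLDataSet, double> calculateRegressionError)
    {
        double[][] input = { new[] { 0.0, 0.0 }, new[] { 1.0, 0.0 } };
        double[][] ideal = { new[] { 0.0 }, new[] { 1.0 } };
        IMLDataSet validation = new BasicMLDataSet(input, ideal);
        return calculateRegressionError(model, validation);
    }
}
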
        /// <summary>
        /// Calculate the error for this neural network. The error is calculated
        /// using root-mean-square (RMS).
        /// </summary>
        ///
        /// <param name="data">The training set.</param>
        /// <returns>The error percentage.</returns>
        public double CalculateError(IMLDataSet data)
        {
            var errorCalculation = new ErrorCalculation();

            var         actual = new double[_outputCount];
            IMLDataPair pair;

            for (int i = 0; i < data.Count; i++)
            {
                pair = data[i];
                Compute(pair.Input, actual);
                errorCalculation.UpdateError(actual, pair.Ideal, pair.Significance);
            }
            return errorCalculation.Calculate();
        }
        /// <summary>
        /// Calculate the error for this neural network. The error is calculated
        /// using root-mean-square (RMS).
        /// </summary>
        ///
        /// <param name="data">The training set.</param>
        /// <returns>The error percentage.</returns>
        public double CalculateError(IMLDataSet data)
        {
            var errorCalculation = new ErrorCalculation();

            var         actual = new double[_outputCount];
            IMLDataPair pair   = BasicMLDataPair.CreatePair(data.InputSize,
                                                            data.IdealSize);

            for (int i = 0; i < data.Count; i++)
            {
                data.GetRecord(i, pair);
                Compute(pair.InputArray, actual);
                errorCalculation.UpdateError(actual, pair.IdealArray, pair.Significance);
            }
            return errorCalculation.Calculate();
        }
        /// <summary>
        /// Calculate the error for this neural network. The error is calculated
        /// using root-mean-square (RMS).
        /// </summary>
        ///
        /// <param name="data">The training set.</param>
        /// <returns>The error percentage.</returns>
        public double CalculateError(IEngineIndexableSet data)
        {
            ErrorCalculation errorCalculation = new ErrorCalculation();

            double[]    actual = new double[this.outputCount];
            IEngineData pair   = BasicEngineData.CreatePair(data.InputSize,
                                                            data.IdealSize);

            for (int i = 0; i < data.Count; i++)
            {
                data.GetRecord(i, pair);
                Compute(pair.InputArray, actual);
                errorCalculation.UpdateError(actual, pair.IdealArray);
            }
            return errorCalculation.Calculate();
        }
 /// <summary>
 /// Perform the gradient calculation for the specified index range.
 /// </summary>
 ///
 public void Run()
 {
     try
     {
         _errorCalculation.Reset();
         for (int i = _low; i <= _high; i++)
         {
             var pair = _training[i];
             Process(pair);
         }
         double error = _errorCalculation.Calculate();
         _owner.Report(_gradients, error, null);
         EngineArray.Fill(_gradients, 0);
     }
     catch (Exception ex)
     {
         _owner.Report(null, 0, ex);
     }
 }
 /// <summary>
 /// Perform the gradient calculation for the specified index range.
 /// </summary>
 ///
 public void Run()
 {
     try
     {
         _errorCalculation.Reset();
         for (int i = _low; i <= _high; i++)
         {
             _training.GetRecord(i, _pair);
             Process(_pair.InputArray, _pair.IdealArray, _pair.Significance);
         }
         double error = _errorCalculation.Calculate();
         _owner.Report(_gradients, error, null);
         EngineArray.Fill(_gradients, 0);
     }
     catch (Exception ex)
     {
         _owner.Report(null, 0, ex);
     }
 }
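
Both Run() variants above process an inclusive index range [_low, _high] and report their gradients and error back to an owner. Below is a sketch of how an owner might partition a data set into such ranges and run one worker per range in parallel; the IRangeWorker stand-in and the partitioning scheme are assumptions, not Encog's actual gradient-worker API.

using System;
using System.Threading.Tasks;

// Stand-in for a range-based worker like the Run() methods above.
public interface IRangeWorker
{
    void Run();   // processes its assigned inclusive [low, high] range
}

public static class RangeDispatcher
{
    // Split [0, count-1] into roughly equal inclusive ranges, one per worker,
    // and run them concurrently. createWorker(low, high) builds a worker for a range.
    public static void RunPartitioned(int count, int workerCount,
                                      Func<int, int, IRangeWorker> createWorker)
    {
        int chunk = (count + workerCount - 1) / workerCount;
        var tasks = new Task[workerCount];
        for (int w = 0; w < workerCount; w++)
        {
            int low = w * chunk;
            int high = Math.Min(low + chunk - 1, count - 1);
            tasks[w] = low <= high
                ? Task.Run(createWorker(low, high).Run)
                : Task.CompletedTask;
        }
        Task.WaitAll(tasks);
    }
}
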
        /// <summary>
        /// Process training batches.
        /// </summary>
        protected void ProcessBatches()
        {
            int lastLearn = 0;
            var errorCalc = new ErrorCalculation();

            _visited.Clear();

            foreach (IMLDataPair pair in _training)
            {
                var input  = pair.Input;
                var ideal  = pair.Ideal;
                var actual = _network.Compute(input);
                var sig    = pair.Significance;

                errorCalc.UpdateError(actual, ideal, sig);

                for (int i = 0; i < _network.OutputCount; i++)
                {
                    double diff = (ideal[i] - actual[i])
                                  * sig;
                    IFreeformNeuron neuron = _network.OutputLayer.Neurons[i];
                    CalculateOutputDelta(neuron, diff);
                    CalculateNeuronGradient(neuron);
                }

                // Are we at the end of a batch?
                lastLearn++;
                if (lastLearn >= BatchSize)
                {
                    lastLearn = 0;
                    Learn();
                }
            }

            // Handle any remaining data.
            if (lastLearn > 0)
            {
                Learn();
            }

            // Set the overall error.
            Error = errorCalc.Calculate();
        }
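
The lastLearn bookkeeping above calls Learn() once per full batch of BatchSize pairs, plus once more for any partial batch left at the end of the epoch. A small, self-contained sketch of that counting pattern, with an arbitrary action standing in for Learn():

using System;

// Sketch of the mini-batch boundary logic above: apply an update every
// 'batchSize' items, plus once more for any partial batch at the end.
public static class BatchBoundarySketch
{
    public static int CountUpdates(int itemCount, int batchSize, Action onLearn)
    {
        int updates = 0;
        int sinceLastLearn = 0;
        for (int i = 0; i < itemCount; i++)
        {
            sinceLastLearn++;
            if (sinceLastLearn >= batchSize)
            {
                sinceLastLearn = 0;
                onLearn();          // end of a full batch
                updates++;
            }
        }
        if (sinceLastLearn > 0)
        {
            onLearn();              // remaining partial batch
            updates++;
        }
        return updates;
    }
}

For example, 10 pairs with a batch size of 4 yield three updates: after pairs 4 and 8, and once for the remaining 2.
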
        /// <inheritdoc/>
        public override sealed void Iteration()
        {
            var errorCalculation = new ErrorCalculation();

            foreach (IMLDataPair pair in _training)
            {
                // calculate the error
                IMLData output = _network.Compute(pair.Input);

                for (int currentAdaline = 0; currentAdaline < output.Count; currentAdaline++)
                {
                    double diff = pair.Ideal[currentAdaline]
                                  - output[currentAdaline];

                    // update the weights; index InputCount is the bias, fed a constant 1.0 input
                    for (int i = 0; i <= _network.InputCount; i++)
                    {
                        double input;

                        if (i == _network.InputCount)
                        {
                            input = 1.0d;
                        }
                        else
                        {
                            input = pair.Input[i];
                        }

                        _network.AddWeight(0, i, currentAdaline,
                                           _learningRate * diff * input);
                    }
                }

                errorCalculation.UpdateError(output.Data, pair.Ideal.Data, pair.Significance);
            }

            // set the global error
            Error = errorCalculation.Calculate();
        }
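
The i == _network.InputCount branch above treats the bias as one extra weight attached to a constant 1.0 input. A self-contained sketch of that delta-rule update on plain arrays, with the bias stored as the last weight; the names here are illustrative.

// Sketch of the delta-rule update above on plain arrays.
// 'weights' has length inputCount + 1; the last element is the bias,
// which behaves like a weight fed a constant 1.0 input.
public static class DeltaRuleSketch
{
    public static void UpdateWeights(double[] weights, double[] input,
                                     double ideal, double actual, double learningRate)
    {
        double diff = ideal - actual;
        for (int i = 0; i < weights.Length; i++)
        {
            double x = (i == weights.Length - 1) ? 1.0 : input[i];   // bias input is 1.0
            weights[i] += learningRate * diff * x;
        }
    }
}
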
        /// <summary>
        /// Perform a training iteration.
        /// </summary>
        public override void Iteration()
        {
            ErrorCalculation errorCalculation = new ErrorCalculation();

            ILayer inputLayer  = network.GetLayer(BasicNetwork.TAG_INPUT);
            ILayer outputLayer = network.GetLayer(BasicNetwork.TAG_OUTPUT);

            foreach (INeuralDataPair pair in this.training)
            {
                // calculate the error
                INeuralData output = this.network.Compute(pair.Input);

                for (int currentAdaline = 0; currentAdaline < output.Count; currentAdaline++)
                {
                    double diff = pair.Ideal[currentAdaline]
                                  - output[currentAdaline];

                    // weights
                    for (int i = 0; i < inputLayer.NeuronCount; i++)
                    {
                        double input = pair.Input[i];
                        synapse.WeightMatrix.Add(i, currentAdaline,
                                                 learningRate * diff * input);
                    }

                    // bias
                    double t = outputLayer.BiasWeights[currentAdaline];
                    t += learningRate * diff;
                    outputLayer.BiasWeights[currentAdaline] = t;
                }

                errorCalculation.UpdateError(output.Data, pair.Ideal.Data);
            }

            // set the global error
            this.Error = errorCalculation.Calculate();
        }
        /// <summary>
        /// Apply the accumulated gradients to the weights via the update rule,
        /// record the error, and reset the accumulators for the next iteration.
        /// </summary>
        public void Update()
        {
            if (IterationNumber == 0)
            {
                UpdateRule.Init(this);
            }

            PreIteration();

            UpdateRule.Update(_gradients, _flat.Weights);
            Error = _errorCalculation.Calculate();

            PostIteration();

            EngineArray.Fill(_gradients, 0);
            _errorCalculation.Reset();

            if (Training is BatchDataSet)
            {
                ((BatchDataSet)Training).Advance();
            }
        }
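
Update() above delegates the actual weight change to UpdateRule.Update(gradients, weights). Below is a minimal sketch of what such an update rule could look like for plain gradient descent; the interface and the learning-rate field are illustrative, not Encog's exact update-rule contract.

// Minimal gradient-descent update rule sketch, mirroring only the
// Update(gradients, weights) call made above.
public interface ISimpleUpdateRule
{
    void Update(double[] gradients, double[] weights);
}

public class SimpleGradientDescentRule : ISimpleUpdateRule
{
    private readonly double _learningRate;

    public SimpleGradientDescentRule(double learningRate)
    {
        _learningRate = learningRate;
    }

    public void Update(double[] gradients, double[] weights)
    {
        // Assumes gradients are accumulated so that adding them reduces the error,
        // matching the (ideal - actual) convention used throughout these snippets.
        for (int i = 0; i < weights.Length; i++)
        {
            weights[i] += _learningRate * gradients[i];
        }
    }
}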