Code example #1
File: RidgeRegrTrainer.cs Project: thild/NET
        /// <summary>
        /// Performs training iteration.
        /// </summary>
        public bool Iteration()
        {
            //Primary stop condition
            if (AttemptEpoch == MaxAttemptEpoch)
            {
                return(false);
            }
            //New lambda to be tested
            double newLambda = _lambdaSeeker.Next;

            //Secondary stop condition
            if (AttemptEpoch > 0 && Math.Abs(_currLambda - newLambda) < StopLambdaDifference)
            {
                return(false);
            }
            //Next epoch allowed
            _currLambda = newLambda;
            ++AttemptEpoch;
            InfoMessage = $"lambda={_currLambda.ToString(CultureInfo.InvariantCulture)}";
            //Inverse of the _XTdotX matrix (regularized when lambda > 0)
            Matrix I;

            if (_currLambda > 0)
            {
                Matrix B   = new Matrix(_XTdotX);
                double tmp = B.Data[0][0];
                B.AddScalarToDiagonal(_currLambda);
                B.Data[0][0] = tmp;
                I            = B.Inverse(true);
            }
            else
            {
                I = _XTdotX.Inverse(true);
            }

            //New weights buffer
            double[] newWeights = new double[_net.NumOfWeights];
            //Weights for each output neuron
            for (int outputIdx = 0; outputIdx < _net.NumOfOutputValues; outputIdx++)
            {
                //Weights solution
                Vector weights = I * _XTdotY[outputIdx];
                //Store weights
                //Bias
                newWeights[_net.NumOfOutputValues * _net.NumOfInputValues + outputIdx] = weights.Data[0];
                //Predictors
                for (int i = 0; i < _net.NumOfInputValues; i++)
                {
                    newWeights[outputIdx * _net.NumOfInputValues + i] = weights.Data[i + 1];
                }
            }
            //Set new weights and compute error
            _net.SetWeights(newWeights);
            MSE = _net.ComputeBatchErrorStat(_inputVectorCollection, _outputVectorCollection).MeanSquare;
            //Update lambda seeker
            _lambdaSeeker.ProcessError(MSE);
            return(true);
        }
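For context, the loop above is the closed-form ridge solution computed once per tested lambda; in the usual notation, with λ being _currLambda,

    \hat{w} = (X^{\top} X + \lambda I)^{-1} X^{\top} y

where the [0,0] diagonal element of XᵀX is saved and restored before the inversion, apparently so that the bias term is not penalized. The resulting vector (bias in weights.Data[0], predictor weights after it) is then scattered into the network's flat weight buffer.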
Code example #2
File: ElasticRegrTrainer.cs Project: okozelsk/NET
 /// <inheritdoc/>
 public bool Iteration()
 {
     //Primary stop condition
     if (AttemptEpoch == MaxAttemptEpoch)
     {
         return(false);
     }
     //Next epoch allowed
     ++AttemptEpoch;
     InfoMessage = $"Lambda={_cfg.Lambda.ToString(CultureInfo.InvariantCulture)} Alpha={_cfg.Alpha.ToString(CultureInfo.InvariantCulture)}";
     //Whole network new weights buffer
     double[] newWeights = _net.GetWeightsCopy();
     //Optimization of the weights for each output separately
     for (int outputIdx = 0; outputIdx < _net.NumOfOutputValues; outputIdx++)
     {
         //New weights for specific output neuron
         double[] weights = new double[_net.NumOfInputValues + 1];
         //Copy the current weights for this output
         //Bias first
         weights[0] = newWeights[_net.NumOfOutputValues * _net.NumOfInputValues + outputIdx];
         //Inputs next
         Parallel.For(0, _net.NumOfInputValues, i =>
         {
             weights[i + 1] = newWeights[outputIdx * _net.NumOfInputValues + i];
         });
         //Elastic iteration
         //Compute and store current output values and new bias
         double   oldBias             = weights[0];
         double   newBias             = 0;
         double[] parallelSubResults1 = new double[_parallelRanges.Count];
         double[] parallelSubResults2 = new double[_parallelRanges.Count];
         double[] computedOutputs     = new double[_outputVectorCollection.Count];
         parallelSubResults1.Populate(0);
         Parallel.For(0, _parallelRanges.Count, rangeIdx =>
         {
             for (int i = _parallelRanges[rangeIdx].Item1; i < _parallelRanges[rangeIdx].Item2; i++)
             {
                 computedOutputs[i]             = ComputeLinOutput(i, weights);
                 parallelSubResults1[rangeIdx] += (_outputVectorCollection[i][outputIdx] - computedOutputs[i] + oldBias);
             }
         });
         //New bias finalization
         for (int i = 0; i < _parallelRanges.Count; i++)
         {
             newBias += parallelSubResults1[i];
         }
         newBias   /= _outputVectorCollection.Count;
         weights[0] = newBias;
         //Update computed outputs if bias has changed
         double biasDifference = newBias - oldBias;
         if (biasDifference != 0)
         {
             Parallel.For(0, _parallelRanges.Count, rangeIdx =>
             {
                 for (int i = _parallelRanges[rangeIdx].Item1; i < _parallelRanges[rangeIdx].Item2; i++)
                 {
                     computedOutputs[i] += biasDifference;
                 }
             });
         }
         //Optimization
         for (int inputValueIdx = 0; inputValueIdx < _net.NumOfInputValues; inputValueIdx++)
         {
             //Fit and denominator computation
             double oldWeight = weights[1 + inputValueIdx];
             double fit = 0, denominator = 0;
             parallelSubResults1.Populate(0);
             parallelSubResults2.Populate(0);
             Parallel.For(0, _parallelRanges.Count, rangeIdx =>
             {
                 for (int i = _parallelRanges[rangeIdx].Item1; i < _parallelRanges[rangeIdx].Item2; i++)
                 {
                     double x = _inputVectorCollection[i][inputValueIdx];
                     if (x != 0)
                     {
                         parallelSubResults1[rangeIdx] += x * (_outputVectorCollection[i][outputIdx] - computedOutputs[i] + x * oldWeight);
                         parallelSubResults2[rangeIdx] += x * x;
                     }
                 }
             });
             //Fit and denominator finalization
             for (int i = 0; i < _parallelRanges.Count; i++)
             {
                 fit         += parallelSubResults1[i];
                 denominator += parallelSubResults2[i];
             }
             fit         /= _outputVectorCollection.Count;
             denominator /= _outputVectorCollection.Count;
             denominator += _cfg.Lambda * (1d - _cfg.Alpha);
             double newWeight = 0;
             if (denominator != 0)
             {
                 newWeight = SoftThreshold(fit) / denominator;
             }
             //Set new weight
             weights[1 + inputValueIdx] = newWeight;
             //Update computed values
             double weightsDiff = newWeight - oldWeight;
             if (weightsDiff != 0)
             {
                 Parallel.For(0, _parallelRanges.Count, rangeIdx =>
                 {
                     for (int i = _parallelRanges[rangeIdx].Item1; i < _parallelRanges[rangeIdx].Item2; i++)
                     {
                         double x = _inputVectorCollection[i][inputValueIdx];
                         if (x != 0)
                         {
                             computedOutputs[i] += weightsDiff * x;
                         }
                     }
                 });
             }
         }
         //Put optimized weights back to the newWeights buffer
         //Bias
         newWeights[_net.NumOfOutputValues * _net.NumOfInputValues + outputIdx] = weights[0];
         //Inputs
         for (int i = 0; i < _net.NumOfInputValues; i++)
         {
             newWeights[outputIdx * _net.NumOfInputValues + i] = weights[i + 1];
         }
     }
     //Set new weights and compute final error
     _net.SetWeights(newWeights);
     MSE = _net.ComputeBatchErrorStat(_inputVectorCollection, _outputVectorCollection).MeanSquare;
     return(true);
 }
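The trainer above is a cyclical coordinate-descent pass: for every input weight it accumulates fit (the correlation of the input with the partial residual) and a denominator of the mean squared input plus the L2 part Lambda * (1 - Alpha), then soft-thresholds the fit. The SoftThreshold helper is not shown in this listing; below is a minimal, hypothetical sketch of the standard operator S(z, gamma) = sign(z) * max(|z| - gamma, 0), written as a member of the trainer and assuming the threshold is Lambda * Alpha (the actual implementation in ElasticRegrTrainer.cs may differ):

 //Hypothetical sketch, not taken from the repository.
 //Standard elastic-net soft-thresholding, assuming gamma = Lambda * Alpha.
 private double SoftThreshold(double z)
 {
     double gamma = _cfg.Lambda * _cfg.Alpha;
     if (z > gamma) return z - gamma;
     if (z < -gamma) return z + gamma;
     return 0d;
 }

Under this convention Alpha = 1 gives a pure lasso update and Alpha = 0 a pure ridge update, which is consistent with the Lambda * (1 - Alpha) term added to the denominator above.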
Code example #3
File: QRDRegrTrainer.cs Project: thild/NET
        /// <summary>
        /// Performs training iteration.
        /// </summary>
        public bool Iteration()
        {
            //Fetch next noise intensity
            double newNoise = _noiseSeeker.Next;

            //Check continue conditions
            if (AttemptEpoch == MaxAttemptEpoch || Math.Abs(_currNoise - newNoise) < StopNoiseDifference)
            {
                //Try new attempt
                if (!NextAttempt())
                {
                    //Next attempt is not available
                    return(false);
                }
                else
                {
                    newNoise = _noiseSeeker.Next;
                }
            }
            //Next epoch
            ++AttemptEpoch;
            _currNoise  = newNoise;
            InfoMessage = $"noise={_currNoise.ToString(CultureInfo.InvariantCulture)}";
            //Adjusted predictors
            Matrix predictors = PreparePredictors(_currNoise);
            //Decomposition
            QRD  decomposition = null;
            bool useableQRD    = true;

            try
            {
                //Try to create the QRD. Any exception signals numerical instability
                decomposition = new QRD(predictors);
            }
            catch
            {
                //Creation of the QRD object threw an exception, so the QRD is not ready for use
                useableQRD = false;
                if (AttemptEpoch == 1)
                {
                    //No previous successful epoch so stop training
                    throw;
                }
            }
            if (useableQRD)
            {
                //QRD is ready for use (low probability of numerical instability)
                //New weights
                double[] newWeights = new double[_net.NumOfWeights];
                //Regression for each output neuron
                for (int outputIdx = 0; outputIdx < _net.NumOfOutputValues; outputIdx++)
                {
                    //Regression
                    Matrix solution = decomposition.Solve(_outputSingleColMatrixCollection[outputIdx]);
                    //Store weights
                    //Input weights
                    for (int i = 0; i < solution.NumOfRows - 1; i++)
                    {
                        newWeights[outputIdx * _net.NumOfInputValues + i] = solution.Data[i][0];
                    }
                    //Bias weight
                    newWeights[_net.NumOfOutputValues * _net.NumOfInputValues + outputIdx] = solution.Data[solution.NumOfRows - 1][0];
                }
                //Set new weights and compute error
                _net.SetWeights(newWeights);
                MSE = _net.ComputeBatchErrorStat(_inputVectorCollection, _outputVectorCollection).MeanSquare;
            }
            _noiseSeeker.ProcessError(MSE);
            return(true);
        }
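For context, the QRD step solves an ordinary least-squares problem for each output column via a QR decomposition of the noise-perturbed predictor matrix; for a full-rank X, in the usual notation,

    X = QR, \qquad \hat{w} = R^{-1} Q^{\top} y

The noise injected by PreparePredictors(_currNoise) apparently acts as a conditioning/regularization knob that _noiseSeeker tunes against the resulting MSE; when the decomposition fails after at least one successful epoch, the previous weights and MSE are simply kept for this iteration.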