Example no. 1
0
 /// <summary>
 /// Starts next training attempt
 /// </summary>
 public bool NextAttempt()
 {
     if (Attempt < MaxAttempt)
     {
         //Next attempt is allowed
         ++Attempt;
         //Reset
         _maxNoiseSeeker = new ParamSeeker(_settings.MaxNoiseSeekerCfg);
         _currMaxNoise   = 1e6;
         MSE             = 0;
         AttemptEpoch    = 0;
         return(true);
     }
     else
     {
         //Max attempt reached -> do nothing and return false
         return(false);
     }
 }
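A usage sketch for NextAttempt() follows. Only NextAttempt(), AttemptEpoch and MaxAttemptEpoch appear in the excerpts; the ITrainer interface and the per-epoch Iteration() call are illustrative assumptions, not part of the trainer shown above.

 // Hypothetical driver (sketch): runs epochs until the current attempt's budget is spent,
 // then either starts the next attempt or stops when NextAttempt() returns false.
 public interface ITrainer
 {
     bool NextAttempt();
     void Iteration();            // assumed per-epoch training step (increments AttemptEpoch)
     int AttemptEpoch { get; }
     int MaxAttemptEpoch { get; }
 }

 public static class TrainingDriver
 {
     public static void Run(ITrainer trainer)
     {
         bool proceed = true;
         while (proceed)
         {
             trainer.Iteration();
             if (trainer.AttemptEpoch >= trainer.MaxAttemptEpoch)
             {
                 //Epoch budget of the current attempt exhausted -> next attempt or stop
                 proceed = trainer.NextAttempt();
             }
         }
     }
 }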
Example no. 2
0
        //Constructor
        /// <summary>
        /// Constructs an initialized instance
        /// </summary>
        /// <param name="net">FF network to be trained</param>
        /// <param name="inputVectorCollection">Predictors (input)</param>
        /// <param name="outputVectorCollection">Ideal outputs (the same number of rows as the inputs)</param>
        /// <param name="settings">Optional startup parameters of the trainer</param>
        public RidgeRegrTrainer(FeedForwardNetwork net,
                                List <double[]> inputVectorCollection,
                                List <double[]> outputVectorCollection,
                                RidgeRegrTrainerSettings settings
                                )
        {
            //Check network readiness
            if (!net.Finalized)
            {
                throw new InvalidOperationException("Can't create trainer. Network structure was not finalized.");
            }
            //Check network conditions
            if (net.LayerCollection.Count != 1 || !(net.LayerCollection[0].Activation is Identity))
            {
                throw new InvalidOperationException("Can't create trainer. Network structure is not compliant (single layer having Identity activation).");
            }
            //Check samples conditions
            if (inputVectorCollection.Count == 0)
            {
                throw new InvalidOperationException("Can't create trainer. Missing training samples.");
            }
            //Collections
            _inputVectorCollection  = new List <double[]>(inputVectorCollection);
            _outputVectorCollection = new List <double[]>(outputVectorCollection);
            //Parameters
            _settings       = settings;
            MaxAttempt      = _settings.NumOfAttempts;
            MaxAttemptEpoch = _settings.NumOfAttemptEpochs;
            Attempt         = 1;
            AttemptEpoch    = 0;
            _net            = net;
            _outputSingleColVectorCollection = new List <Vector>(_net.NumOfOutputValues);
            for (int outputIdx = 0; outputIdx < _net.NumOfOutputValues; outputIdx++)
            {
                Vector outputSingleColVector = new Vector(outputVectorCollection.Count);
                for (int row = 0; row < outputVectorCollection.Count; row++)
                {
                    //Output
                    outputSingleColVector.Data[row] = outputVectorCollection[row][outputIdx];
                }
                _outputSingleColVectorCollection.Add(outputSingleColVector);
            }
            //Lambda seeker
            _lambdaSeeker = new ParamSeeker(_settings.LambdaSeekerCfg);
            _currLambda   = 0;
            //Matrix setup
            Matrix X = new Matrix(inputVectorCollection.Count, _net.NumOfInputValues + 1);

            for (int row = 0; row < inputVectorCollection.Count; row++)
            {
                //Add constant bias
                X.Data[row][0] = 1d;
                //Add predictors
                inputVectorCollection[row].CopyTo(X.Data[row], 1);
            }
            _XT     = X.Transpose();
            _XTdotX = _XT * X;
            _XTdotY = new Vector[_net.NumOfOutputValues];
            for (int outputIdx = 0; outputIdx < _net.NumOfOutputValues; outputIdx++)
            {
                _XTdotY[outputIdx] = _XT * _outputSingleColVectorCollection[outputIdx];
            }
            return;
        }
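The constructor above only precomputes X^T, X^T*X and X^T*y; the excerpt does not show how the ridge weights are actually obtained. Below is a minimal, self-contained sketch (plain arrays instead of the library's Matrix/Vector types) of the closed-form step those products enable, w = (X^T X + lambda*I)^(-1) X^T y per output column. The naive Gaussian elimination and the choice to regularize the bias diagonal as well are assumptions, not taken from the excerpt.

        public static class RidgeSketch
        {
            // Solves (X^T X + lambda*I) w = X^T y for one output column.
            // X is expected to already contain the constant bias column (as in the constructor above).
            public static double[] Solve(double[][] X, double[] y, double lambda)
            {
                int n = X.Length;            // number of samples
                int p = X[0].Length;         // number of predictors incl. the bias column
                double[,] A = new double[p, p];
                double[] b = new double[p];
                // A = X^T X + lambda*I, b = X^T y
                for (int i = 0; i < p; i++)
                {
                    for (int j = 0; j < p; j++)
                    {
                        double s = 0d;
                        for (int r = 0; r < n; r++) s += X[r][i] * X[r][j];
                        A[i, j] = s + (i == j ? lambda : 0d);
                    }
                    double t = 0d;
                    for (int r = 0; r < n; r++) t += X[r][i] * y[r];
                    b[i] = t;
                }
                // Forward elimination (lambda > 0 keeps the pivots nonzero; no pivoting safeguards here)
                for (int k = 0; k < p; k++)
                {
                    for (int i = k + 1; i < p; i++)
                    {
                        double f = A[i, k] / A[k, k];
                        for (int j = k; j < p; j++) A[i, j] -= f * A[k, j];
                        b[i] -= f * b[k];
                    }
                }
                // Back substitution
                double[] w = new double[p];
                for (int i = p - 1; i >= 0; i--)
                {
                    double s = b[i];
                    for (int j = i + 1; j < p; j++) s -= A[i, j] * w[j];
                    w[i] = s / A[i, i];
                }
                return w;
            }
        }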
Example no. 3
0
        //Constructor
        /// <summary>
        /// Constructs an initialized instance
        /// </summary>
        /// <param name="net">FF network to be trained</param>
        /// <param name="inputVectorCollection">Predictors (input)</param>
        /// <param name="outputVectorCollection">Ideal outputs (the same number of rows as the inputs)</param>
        /// <param name="settings">Optional startup parameters of the trainer</param>
        /// <param name="rand">Random object to be used</param>
        public RidgeRegrTrainer(FeedForwardNetwork net,
                                List <double[]> inputVectorCollection,
                                List <double[]> outputVectorCollection,
                                RidgeRegrTrainerSettings settings,
                                Random rand
                                )
        {
            //Check network readiness
            if (!net.Finalized)
            {
                throw new Exception("Can't create trainer. Network structure was not finalized.");
            }
            //Check network conditions
            if (net.LayerCollection.Count != 1 || !(net.LayerCollection[0].Activation is Identity))
            {
                throw new Exception("Can't create trainer. Network structure is not compliant (single layer having Identity activation).");
            }
            //Check samples conditions
            if (inputVectorCollection.Count == 0)
            {
                throw new Exception("Can't create trainer. Missing training samples.");
            }
            //Collections
            _inputVectorCollection  = new List <double[]>(inputVectorCollection);
            _outputVectorCollection = new List <double[]>(outputVectorCollection);
            //Parameters
            _settings       = settings;
            MaxAttempt      = _settings.NumOfAttempts;
            MaxAttemptEpoch = _settings.NumOfAttemptEpochs;
            Attempt         = 1;
            AttemptEpoch    = 0;
            _net            = net;
            _rand           = rand;
            _outputSingleColVectorCollection = new List <Vector>(_net.NumOfOutputValues);
            for (int outputIdx = 0; outputIdx < _net.NumOfOutputValues; outputIdx++)
            {
                Vector outputSingleColVector = new Vector(outputVectorCollection.Count);
                for (int row = 0; row < outputVectorCollection.Count; row++)
                {
                    //Output
                    outputSingleColVector.Data[row] = outputVectorCollection[row][outputIdx];
                }
                _outputSingleColVectorCollection.Add(outputSingleColVector);
            }
            //Lambda seeker
            _lambdaSeeker = new ParamSeeker(_settings.LambdaSeekerCfg);
            _currLambda   = 1e6;
            //Matrix setup
            Matrix predictorsMatrix = new Matrix(inputVectorCollection.Count, _net.NumOfInputValues + 1);

            for (int row = 0; row < inputVectorCollection.Count; row++)
            {
                //Predictors
                for (int col = 0; col < _net.NumOfInputValues; col++)
                {
                    predictorsMatrix.Data[row][col] = inputVectorCollection[row][col];
                }
                //Add constant bias to predictors
                predictorsMatrix.Data[row][_net.NumOfInputValues] = 1;
            }
            _transposedPredictorsMatrix = predictorsMatrix.Transpose();
            _baseSquareMatrix           = _transposedPredictorsMatrix * predictorsMatrix;
            return;
        }