Ejemplo n.º 1
0
        /// <summary>
        ///     Run the training process for the given method and data set.
        /// </summary>
        /// <param name="train">The training method.</param>
        /// <param name="method">The ML method.</param>
        /// <param name="trainingSet">The training set.</param>
        private void PerformTraining(IMLTrain train, IMLMethod method,
                                     IMLDataSet trainingSet)
        {
            ValidateNetwork.ValidateMethodToData(method, trainingSet);
            double targetError = Prop.GetPropertyDouble(
                ScriptProperties.MlTrainTargetError);

            Analyst.ReportTrainingBegin();
            int iterationLimit = Analyst.MaxIteration;

            if (train.ImplementationType == TrainingImplementationType.OnePass)
            {
                // One-pass methods need exactly a single iteration.
                train.Iteration();
                Analyst.ReportTraining(train);
            }
            else
            {
                // Iterate until the target error is reached, a stop is
                // requested, the trainer reports it is done, or the
                // iteration limit (-1 = unlimited) is hit.
                bool keepTraining;
                do
                {
                    train.Iteration();
                    Analyst.ReportTraining(train);
                    keepTraining = (train.Error > targetError)
                                   && !Analyst.ShouldStopCommand()
                                   && !train.TrainingDone
                                   && ((iterationLimit == -1)
                                       || (train.IterationNumber < iterationLimit));
                } while (keepTraining);
            }

            train.FinishTraining();

            Analyst.ReportTrainingEnd();
        }
        /// <summary>
        /// Construct the LMA object.
        /// </summary>
        /// <param name="network">The network to train. Must have a single output neuron.</param>
        /// <param name="training">The training data to use. Must be indexable.</param>
        /// <param name="h">The Hessian calculator to use.</param>
        public LevenbergMarquardtTraining(BasicNetwork network,
                                          IMLDataSet training, IComputeHessian h)
            : base(TrainingImplementationType.Iterative)
        {
            ValidateNetwork.ValidateMethodToData(network, training);

            // Bare field names (no "this.") for consistency with the other
            // constructor overloads of this class.
            Training           = training;
            _indexableTraining = Training;
            _network           = network;
            _trainingLength    = (int)_indexableTraining.Count;
            _weightCount       = _network.Structure.CalculateSize();
            _lambda            = 0.1;
            _deltas            = new double[_weightCount];
            _diagonal          = new double[_weightCount];

            // Reusable pair sized to the data set's input/ideal dimensions.
            var input = new BasicMLData(
                _indexableTraining.InputSize);
            var ideal = new BasicMLData(
                _indexableTraining.IdealSize);

            _pair = new BasicMLDataPair(input, ideal);

            _hessian = h;
            _hessian.Init(network, training);
        }
Ejemplo n.º 3
0
        /// <summary>
        /// Construct the LMA object.
        /// </summary>
        ///
        /// <param name="network">The network to train. Must have a single output neuron.</param>
        /// <param name="training">The training data to use. Must be indexable.</param>
        public LevenbergMarquardtTraining(BasicNetwork network,
                                          IMLDataSet training) : base(TrainingImplementationType.Iterative)
        {
            ValidateNetwork.ValidateMethodToData(network, training);

            // This LMA implementation requires exactly one output neuron.
            if (network.OutputCount != 1)
            {
                throw new TrainingError(
                          "Levenberg Marquardt requires an output layer with a single neuron.");
            }

            Training = training;
            _indexableTraining = Training;
            _network = network;
            _trainingLength = (int)_indexableTraining.Count;
            _parametersLength = _network.Structure.CalculateSize();

            // Hessian approximation and working buffers, all sized by the
            // number of trainable parameters.
            _hessianMatrix = new Matrix(_parametersLength, _parametersLength);
            _hessian = _hessianMatrix.Data;
            _deltas = new double[_parametersLength];
            _gradient = new double[_parametersLength];
            _diagonal = new double[_parametersLength];

            // Initial coefficient values.
            _alpha = 0.0d;
            _beta = 1.0d;
            _lambda = 0.1d;

            // Reusable pair matching the data set's input/ideal sizes.
            var inputHolder = new BasicMLData(_indexableTraining.InputSize);
            var idealHolder = new BasicMLData(_indexableTraining.IdealSize);
            _pair = new BasicMLDataPair(inputHolder, idealHolder);
        }
Ejemplo n.º 4
0
 /// <summary>
 /// Construct a QPROP trainer for flat networks.
 /// </summary>
 /// <param name="network">The network to train.</param>
 /// <param name="training">The training data.</param>
 /// <param name="learnRate">The learning rate.  A value of 2 is a good
 ///            starting point; lower it if training fails to converge.
 ///            QPROP tolerates higher learning rates than backprop.</param>
 public QuickPropagation(IContainsFlat network,
                         IMLDataSet training, double learnRate) : base(network, training)
 {
     ValidateNetwork.ValidateMethodToData(network, training);

     LearningRate = learnRate;
     OutputEpsilon = 1.0;

     // One previous-delta slot per network weight.
     int weightCount = Network.Flat.Weights.Length;
     LastDelta = new double[weightCount];
 }
Ejemplo n.º 5
0
 /// <summary>
 /// Construct a backpropagation trainer.
 /// </summary>
 /// <param name="network">The network that is to be trained</param>
 /// <param name="training">The training set</param>
 /// <param name="learnRate">The learning rate.</param>
 /// <param name="momentum">The momentum.</param>
 public Backpropagation(BasicNetwork network,
                        IMLDataSet training, double learnRate,
                        double momentum) : base(network, training)
 {
     ValidateNetwork.ValidateMethodToData(network, training);

     _learningRate = learnRate;
     _momentum = momentum;

     // One previous-delta slot per network weight.
     _lastDelta = new double[Network.Flat.Weights.Length];
 }
        // NOTE(review): removed an empty Javadoc-style placeholder block
        // (Java comment syntax, no content) left over from a port; the XML
        // documentation on the constructor below covers its parameters.


        /// <summary>
        /// Construct a QPROP trainer for flat networks.
        /// </summary>
        /// <param name="network">The network to train.</param>
        /// <param name="training">The training data.</param>
        /// <param name="learnRate">The learning rate.  A value of 2 is a good
        ///            starting point; lower it if training fails to converge.
        ///            QPROP tolerates higher learning rates than backprop.</param>
        public QuickPropagation(IContainsFlat network,
                                IMLDataSet training, double learnRate) : base(network, training)
        {
            ValidateNetwork.ValidateMethodToData(network, training);

            // Delegate the actual weight updates to the flat-network QPROP trainer.
            FlatTraining = new TrainFlatNetworkQPROP(
                network.Flat, Training, learnRate);
        }
Ejemplo n.º 7
0
        /// <summary>
        /// Construct a backpropagation trainer.
        /// </summary>
        /// <param name="network">The network that is to be trained</param>
        /// <param name="training">The training set</param>
        /// <param name="learnRate">The learning rate.</param>
        /// <param name="momentum">The momentum.</param>
        public Backpropagation(IContainsFlat network,
                               IMLDataSet training, double learnRate,
                               double momentum) : base(network, training)
        {
            ValidateNetwork.ValidateMethodToData(network, training);

            // Delegate the actual weight updates to the flat-network trainer.
            FlatTraining = new TrainFlatNetworkBackPropagation(
                network.Flat, Training, learnRate, momentum);
        }
Ejemplo n.º 8
0
        /// <summary>
        /// Construct the LMA object.
        /// </summary>
        /// <param name="network">The network to train. Must have a single output neuron.</param>
        /// <param name="training">The training data to use. Must be indexable.</param>
        /// <param name="h">The Hessian calculator to use.</param>
        public LevenbergMarquardtTraining(BasicNetwork network,
                                          IMLDataSet training, IComputeHessian h)
            : base(TrainingImplementationType.Iterative)
        {
            ValidateNetwork.ValidateMethodToData(network, training);

            Training           = training;
            _indexableTraining = Training;
            _network           = network;
            _trainingLength    = _indexableTraining.Count;
            _weightCount       = _network.Structure.CalculateSize();
            _lambda            = 0.1;
            _deltas            = new double[_weightCount];
            _diagonal          = new double[_weightCount];

            _hessian = h;
            // Initialize the Hessian calculator with the network and data;
            // the sibling Hessian-based constructor does this and the
            // calculator was otherwise stored uninitialized here.
            _hessian.Init(network, training);
        }
 /// <summary>
 /// Construct a QPROP trainer for flat networks.
 /// </summary>
 /// <param name="network">The network to train.</param>
 /// <param name="training">The training data.</param>
 /// <param name="learnRate">The learning rate.  A value of 2 is a good
 ///            starting point; lower it if training fails to converge.
 ///            QPROP tolerates higher learning rates than backprop.</param>
 public QuickPropagation(BasicNetwork network,
                         IMLDataSet training, double learnRate) : base(network, training)
 {
     ValidateNetwork.ValidateMethodToData(network, training);

     // Only the learning rate is configurable here; everything else is
     // handled by the base propagation class.
     LearningRate = learnRate;
 }