Example #1
 public QuickPropagation(IContainsFlat network, IMLDataSet training, double learnRate)
     : base(network, training)
 {
     ValidateNetwork.ValidateMethodToData(network, training);
     TrainFlatNetworkQPROP kqprop = new TrainFlatNetworkQPROP(network.Flat, this.Training, learnRate);
     base.FlatTraining = kqprop;
 }
 /// <summary>
 /// Construct a training class.
 /// </summary>
 ///
 /// <param name="network">The network to train.</param>
 /// <param name="training">The training data.</param>
 public ScaledConjugateGradient(IContainsFlat network,
                                IMLDataSet training) : base(network, training)
 {
     var rpropFlat = new TrainFlatNetworkSCG(
         network.Flat, Training);
     FlatTraining = rpropFlat;
 }
 /// <summary>
 /// Construct a Manhattan propagation training object.
 /// </summary>
 ///
 /// <param name="network">The network to train.</param>
 /// <param name="training">The training data to use.</param>
 /// <param name="learnRate">The learning rate.</param>
 public ManhattanPropagation(IContainsFlat network,
                             IMLDataSet training, double learnRate)
     : base(network, training)
 {
     _learningRate = learnRate;
     _zeroTolerance = RPROPConst.DefaultZeroTolerance;
 }
Example #4
 public Backpropagation(IContainsFlat network, IMLDataSet training, double learnRate, double momentum)
     : base(network, training)
 {
     ValidateNetwork.ValidateMethodToData(network, training);
     TrainFlatNetworkBackPropagation propagation = new TrainFlatNetworkBackPropagation(network.Flat, this.Training, learnRate, momentum);
     base.FlatTraining = propagation;
 }
Example #5
 /// <summary>
 /// Construct a QPROP trainer for flat networks.
 /// </summary>
 /// <param name="network">The network to train.</param>
 /// <param name="training">The training data.</param>
 /// <param name="learnRate">The learning rate.  2 is a good suggestion as
 ///            a learning rate to start with.  If it fails to converge,
 ///            then drop it.  Just like backprop, except QPROP can
 ///            take higher learning rates.</param>
 public QuickPropagation(IContainsFlat network,
                         IMLDataSet training, double learnRate) : base(network, training)
 {
     ValidateNetwork.ValidateMethodToData(network, training);
     LearningRate  = learnRate;
     LastDelta     = new double[Network.Flat.Weights.Length];
     OutputEpsilon = 1.0;
 }
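A minimal usage sketch for this constructor, assuming the standard Encog 3 C# XOR setup (BasicNetwork, BasicMLDataSet, and the namespace layout follow the Encog examples, not this page; the rate of 2.0 follows the doc comment's suggestion):

 using Encog.Engine.Network.Activation;
 using Encog.ML.Data.Basic;
 using Encog.Neural.Networks;
 using Encog.Neural.Networks.Layers;
 using Encog.Neural.Networks.Training.Propagation.Quick;

 // XOR truth table as training data.
 double[][] xorInput = { new[] {0.0, 0.0}, new[] {1.0, 0.0}, new[] {0.0, 1.0}, new[] {1.0, 1.0} };
 double[][] xorIdeal = { new[] {0.0}, new[] {1.0}, new[] {1.0}, new[] {0.0} };
 var trainingSet = new BasicMLDataSet(xorInput, xorIdeal);

 // BasicNetwork implements IContainsFlat, so it can be passed straight in.
 var network = new BasicNetwork();
 network.AddLayer(new BasicLayer(null, true, 2));
 network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
 network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
 network.Structure.FinalizeStructure();
 network.Reset();

 var train = new QuickPropagation(network, trainingSet, 2.0);
 do
 {
     train.Iteration();
 } while (train.Error > 0.01);
 train.FinishTraining();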
        /// <summary>
        /// Construct a QPROP trainer for flat networks.
        /// </summary>
        /// <param name="network">The network to train.</param>
        /// <param name="training">The training data.</param>
        /// <param name="learnRate">The learning rate.  2 is a good suggestion as 
        ///            a learning rate to start with.  If it fails to converge, 
        ///            then drop it.  Just like backprop, except QPROP can 
        ///            take higher learning rates.</param>
        public QuickPropagation(IContainsFlat network,
                                IMLDataSet training, double learnRate) : base(network, training)
        {
            ValidateNetwork.ValidateMethodToData(network, training);
            var backFlat = new TrainFlatNetworkQPROP(
                network.Flat, Training, learnRate);
            FlatTraining = backFlat;
        }
Example #7
        /// <summary>
        /// Construct a training class.
        /// </summary>
        ///
        /// <param name="network">The network to train.</param>
        /// <param name="training">The training data.</param>
        public ScaledConjugateGradient(IContainsFlat network,
                                       IMLDataSet training) : base(network, training)
        {
            var rpropFlat = new TrainFlatNetworkSCG(
                network.Flat, Training);

            FlatTraining = rpropFlat;
        }
 /// <summary>
 /// Construct a QPROP trainer for flat networks.
 /// </summary>
 /// <param name="network">The network to train.</param>
 /// <param name="training">The training data.</param>
 /// <param name="learnRate">The learning rate.  2 is a good suggestion as 
 ///            a learning rate to start with.  If it fails to converge, 
 ///            then drop it.  Just like backprop, except QPROP can 
 ///            take higher learning rates.</param>
 public QuickPropagation(IContainsFlat network,
     IMLDataSet training, double learnRate)
     : base(network, training)
 {
     ValidateNetwork.ValidateMethodToData(network, training);
     LearningRate = learnRate;
     LastDelta = new double[Network.Flat.Weights.Length];
     OutputEpsilon = 1.0;
 }
 /// <param name="network">The network that is to be trained</param>
 /// <param name="training">The training set</param>
 /// <param name="learnRate"></param>
 /// <param name="momentum"></param>
 public Backpropagation(IContainsFlat network,
                        IMLDataSet training, double learnRate,
                        double momentum) : base(network, training)
 {
     ValidateNetwork.ValidateMethodToData(network, training);
     var backFlat = new TrainFlatNetworkBackPropagation(
         network.Flat, Training, learnRate, momentum);
     FlatTraining = backFlat;
 }
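For comparison, the same training loop with this backpropagation constructor; the 0.7/0.3 rate and momentum are illustrative starting values, not values taken from this page:

 // Same network/trainingSet setup as in the QPROP sketch above.
 var train = new Backpropagation(network, trainingSet, 0.7, 0.3); // learnRate, momentum (illustrative)
 do
 {
     train.Iteration();
 } while (train.Error > 0.01);
 train.FinishTraining();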
        /// <summary>
        /// Construct a QPROP trainer for flat networks.
        /// </summary>
        /// <param name="network">The network to train.</param>
        /// <param name="training">The training data.</param>
        /// <param name="learnRate">The learning rate.  2 is a good suggestion as
        ///            a learning rate to start with.  If it fails to converge,
        ///            then drop it.  Just like backprop, except QPROP can
        ///            take higher learning rates.</param>
        public QuickPropagation(IContainsFlat network,
                                IMLDataSet training, double learnRate) : base(network, training)
        {
            ValidateNetwork.ValidateMethodToData(network, training);
            var backFlat = new TrainFlatNetworkQPROP(
                network.Flat, Training, learnRate);

            FlatTraining = backFlat;
        }
Example #11
 /// <param name="network">The network that is to be trained</param>
 /// <param name="training">The training set</param>
 /// <param name="learnRate"></param>
 /// <param name="momentum"></param>
 public Backpropagation(IContainsFlat network,
                        IMLDataSet training, double learnRate,
                        double momentum) : base(network, training)
 {
     ValidateNetwork.ValidateMethodToData(network, training);
     _momentum     = momentum;
     _learningRate = learnRate;
     _lastDelta    = new double[Network.Flat.Weights.Length];
 }
        /// <summary>
        /// Construct a resilient training object, allowing the training parameters
        /// to be specified. The default parameters are usually acceptable for the
        /// resilient training algorithm, so you should normally use the other
        /// constructor, which makes use of the default values.
        /// </summary>
        ///
        /// <param name="network">The network to train.</param>
        /// <param name="training">The training set to use.</param>
        /// <param name="initialUpdate">The initial update value used for every weight.</param>
        /// <param name="maxStep">The maximum that a delta can reach.</param>
        public ResilientPropagation(IContainsFlat network,
                                    IMLDataSet training, double initialUpdate,
                                    double maxStep) : base(network, training)
        {
            var rpropFlat = new TrainFlatNetworkResilient(
                network.Flat, Training,
                RPROPConst.DefaultZeroTolerance, initialUpdate, maxStep);

            FlatTraining = rpropFlat;
        }
Example #13
        /// <param name="network">The network that is to be trained</param>
        /// <param name="training">The training set</param>
        /// <param name="learnRate"></param>
        /// <param name="momentum"></param>
        public Backpropagation(IContainsFlat network,
                               IMLDataSet training, double learnRate,
                               double momentum) : base(network, training)
        {
            ValidateNetwork.ValidateMethodToData(network, training);
            var backFlat = new TrainFlatNetworkBackPropagation(
                network.Flat, Training, learnRate, momentum);

            FlatTraining = backFlat;
        }
        /// <summary>
        /// Construct a resilient training object, allowing the training parameters
        /// to be specified. The default parameters are usually acceptable for the
        /// resilient training algorithm, so you should normally use the other
        /// constructor, which makes use of the default values.
        /// </summary>
        ///
        /// <param name="network">The network to train.</param>
        /// <param name="training">The training set to use.</param>
        /// <param name="initialUpdate">The initial update value used for every weight.</param>
        /// <param name="maxStep">The maximum that a delta can reach.</param>
        public ResilientPropagation(IContainsFlat network,
                                    IMLDataSet training, double initialUpdate,
                                    double maxStep) : base(network, training)
        {
            _updateValues      = new double[network.Flat.Weights.Length];
            _zeroTolerance     = RPROPConst.DefaultZeroTolerance;
            _maxStep           = maxStep;
            _lastWeightChanged = new double[Network.Flat.Weights.Length];
            _lastDelta         = new double[Network.Flat.Weights.Length];

            for (int i = 0; i < _updateValues.Length; i++)
            {
                _updateValues[i] = initialUpdate;
            }
        }
Example #15
        /// <summary>
        /// Construct a propagation object.
        /// </summary>
        ///
        /// <param name="network">The network.</param>
        /// <param name="training">The training set.</param>
        protected Propagation(IContainsFlat network, IMLDataSet training) : base(TrainingImplementationType.Iterative)
        {
            _network  = network;
            _flat     = network.Flat;
            _training = training;

            Gradients     = new double[_flat.Weights.Length];
            _lastGradient = new double[_flat.Weights.Length];

            _indexable         = training;
            _numThreads        = 0;
            _reportedException = null;
            FixFlatSpot        = true;
            ErrorFunction      = new LinearErrorFunction();
        }
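Two of the fields this base constructor initializes, FixFlatSpot and ErrorFunction, are assigned as properties, so any derived trainer can override them before iterating. A sketch, assuming those setters are public as the constructor's own assignments suggest:

        var train = new Backpropagation(network, trainingSet, 0.7, 0.3);
        train.FixFlatSpot = false; // disable the flat-spot correction for sigmoid derivatives
        train.ErrorFunction = new LinearErrorFunction(); // the default; any other error function can be substituted
        train.Iteration();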
        public StochasticGradientDescent(IContainsFlat network,
                                         IMLDataSet training, IGenerateRandom theRandom) :
            base(TrainingImplementationType.Iterative)
        {
            Training   = training;
            UpdateRule = new AdamUpdate();

            if (!(training is BatchDataSet))
            {
                BatchSize = 25;
            }

            _method           = network;
            _flat             = network.Flat;
            _layerDelta       = new double[_flat.LayerOutput.Length];
            _gradients        = new double[_flat.Weights.Length];
            _errorCalculation = new ErrorCalculation();
            _rnd         = theRandom;
            LearningRate = 0.001;
            Momentum     = 0.9;
        }
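The constructor above hard-wires its defaults: AdamUpdate as the update rule, a learning rate of 0.001, momentum of 0.9, and a batch size of 25 whenever the data set is not already a BatchDataSet. A sketch of overriding them through the two-argument overload shown later on this page, assuming the setters are public as the constructor's own assignments suggest:

        var sgd = new StochasticGradientDescent(network, trainingSet); // seeds from MersenneTwisterGenerateRandom
        sgd.LearningRate = 0.01; // raise the 0.001 default
        sgd.Momentum = 0.0;      // disable momentum entirely
        sgd.BatchSize = 50;      // larger mini-batches
        sgd.Iteration();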
Example #17
        /// <summary>
        /// Validate a network for training.
        /// </summary>
        ///
        /// <param name="network">The network to validate.</param>
        /// <param name="training">The training set to validate.</param>
        public static void ValidateNetworkForTraining(IContainsFlat network,
                                                      IMLDataSet training)
        {
            int inputCount  = network.Flat.InputCount;
            int outputCount = network.Flat.OutputCount;

            if (inputCount != training.InputSize)
            {
                throw new NeuralNetworkError("The input layer size of "
                                             + inputCount + " must match the training input size of "
                                             + training.InputSize + ".");
            }

            if ((training.IdealSize > 0) &&
                (outputCount != training.IdealSize))
            {
                throw new NeuralNetworkError("The output layer size of "
                                             + outputCount + " must match the training input size of "
                                             + training.IdealSize + ".");
            }
        }
        /// <summary>
        /// Validate a network for training.
        /// </summary>
        ///
        /// <param name="network">The network to validate.</param>
        /// <param name="training">The training set to validate.</param>
        public static void ValidateNetworkForTraining(IContainsFlat network,
                                                      IMLDataSet training)
        {
            int inputCount = network.Flat.InputCount;
            int outputCount = network.Flat.OutputCount;

            if (inputCount != training.InputSize)
            {
                throw new NeuralNetworkError("The input layer size of "
                                             + inputCount + " must match the training input size of "
                                             + training.InputSize + ".");
            }

            if ((training.IdealSize > 0)
                && (outputCount != training.IdealSize))
            {
                throw new NeuralNetworkError("The output layer size of "
                                             + outputCount + " must match the training input size of "
                                             + training.IdealSize + ".");
            }
        }
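In practice this check runs before any training begins, so a mismatched network fails fast. A sketch of catching the error; the EncogValidate host class name is an assumption, since only the static method itself is shown above:

        try
        {
            EncogValidate.ValidateNetworkForTraining(network, trainingSet); // host class name assumed
        }
        catch (NeuralNetworkError e)
        {
            Console.WriteLine("Network does not fit the data: " + e.Message);
        }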
Example #19
        /// <summary>
        /// Construct a training class.
        /// </summary>
        ///
        /// <param name="network">The network to train.</param>
        /// <param name="training">The training data.</param>
        public ScaledConjugateGradient(IContainsFlat network,
                                       IMLDataSet training) : base(network, training)
        {
            _success  = true;
            _delta    = 0;
            _lambda2  = 0;
            _lambda   = FirstLambda;
            _oldError = 0;
            _magP     = 0;
            _restart  = false;

            _weights = EngineArray.ArrayCopy(network.Flat.Weights);
            int numWeights = _weights.Length;

            _oldWeights  = new double[numWeights];
            _oldGradient = new double[numWeights];

            _p = new double[numWeights];
            _r = new double[numWeights];

            _mustInit = true;
        }
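Note what this constructor does not take: a learning rate. SCG tunes its own step size through the lambda values initialized above, which makes it a good first choice when hyperparameter tuning is unwelcome. A minimal sketch:

        var scg = new ScaledConjugateGradient(network, trainingSet); // nothing to tune
        do
        {
            scg.Iteration();
        } while (scg.Error > 0.01);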
Example #20
 public static void ValidateNetworkForTraining(IContainsFlat network, IMLDataSet training)
 {
     int inputCount = network.Flat.InputCount;
     int outputCount = network.Flat.OutputCount;

     if (inputCount != training.InputSize)
     {
         throw new NeuralNetworkError("The input layer size of "
                                      + inputCount + " must match the training input size of "
                                      + training.InputSize + ".");
     }

     if ((training.IdealSize > 0) && (outputCount != training.IdealSize))
     {
         throw new NeuralNetworkError("The output layer size of "
                                      + outputCount + " must match the training ideal size of "
                                      + training.IdealSize + ".");
     }
 }
 /// <param name="network">The network that is to be trained</param>
 /// <param name="training">The training set</param>
 /// <param name="learnRate"></param>
 /// <param name="momentum"></param>
 public Backpropagation(IContainsFlat network,
     IMLDataSet training, double learnRate,
     double momentum)
     : base(network, training)
 {
     ValidateNetwork.ValidateMethodToData(network, training);
     _momentum = momentum;
     _learningRate = learnRate;
     _lastDelta = new double[Network.Flat.Weights.Length];
 }
 /// <summary>
 /// Create a class to train using backpropagation. Use auto learn rate and
 /// momentum. Use the CPU to train.
 /// </summary>
 ///
 /// <param name="network">The network that is to be trained.</param>
 /// <param name="training">The training data to be used for backpropagation.</param>
 public Backpropagation(IContainsFlat network, IMLDataSet training)
     : this(network, training, 0, 0)
 {
     AddStrategy(new SmartLearningRate());
     AddStrategy(new SmartMomentum());
 }
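Because this overload delegates to the four-argument constructor with 0/0 and then attaches SmartLearningRate and SmartMomentum, the strategies pick and adapt both values during training; the caller supplies nothing but the network and data:

 var train = new Backpropagation(network, trainingSet); // strategies choose learnRate and momentum
 do
 {
     train.Iteration();
 } while (train.Error > 0.01);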
 /// <summary>
 /// Construct a propagation object.
 /// </summary>
 ///
 /// <param name="network">The network.</param>
 /// <param name="training">The training set.</param>
 protected Propagation(IContainsFlat network, IMLDataSet training) : base(TrainingImplementationType.Iterative)
 {
     _network = network;
     Training = training;
 }
Example #24
 public ResilientPropagation(IContainsFlat network, IMLDataSet training)
     : this(network, training, 0.1, 50.0)
 {
 }
Example #25
        /// <summary>
        /// Construct a resilient training object, allowing the training parameters
        /// to be specified. The default parameters are usually acceptable for the
        /// resilient training algorithm, so you should normally use the other
        /// constructor, which makes use of the default values.
        /// </summary>
        ///
        /// <param name="network">The network to train.</param>
        /// <param name="training">The training set to use.</param>
        /// <param name="initialUpdate">The initial update value used for every weight.</param>
        /// <param name="maxStep">The maximum that a delta can reach.</param>
        public ResilientPropagation(IContainsFlat network,
                                    IMLDataSet training, double initialUpdate,
                                    double maxStep)
            : base(network, training)
        {
            _updateValues = new double[network.Flat.Weights.Length];
            _zeroTolerance = RPROPConst.DefaultZeroTolerance;
            _maxStep = maxStep;
            _lastWeightChanged = new double[Network.Flat.Weights.Length];
            _lastDelta = new double[Network.Flat.Weights.Length];

            for (int i = 0; i < _updateValues.Length; i++)
            {
                _updateValues[i] = initialUpdate;
            }
        }
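As the doc comment says, the explicit parameters only matter when the defaults misbehave. A sketch of both forms, with the custom values purely illustrative:

        // Usual case: rely on RPROPConst.DefaultInitialUpdate / DefaultMaxStep.
        var rprop = new ResilientPropagation(network, trainingSet);

        // Rare case: smaller initial updates and a tighter step cap for a badly scaled problem.
        var rpropCustom = new ResilientPropagation(network, trainingSet, 0.05, 10.0);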
Example #26
 protected Propagation(IContainsFlat network, IMLDataSet training)
     : base(TrainingImplementationType.Iterative)
 {
      this._network = network;
     this.Training = training;
 }
 /// <summary>
 /// Construct an RPROP trainer using the default training parameters.
 /// Usually this is the constructor to use, as the resilient training
 /// algorithm is designed for the default parameters to be acceptable
 /// for nearly all problems.
 /// </summary>
 ///
 /// <param name="network">The network to train.</param>
 /// <param name="training">The training data to use.</param>
 public ResilientPropagation(IContainsFlat network,
                             IMLDataSet training)
     : this(network, training, RPROPConst.DefaultInitialUpdate, RPROPConst.DefaultMaxStep)
 {
 }
 /// <summary>
 /// Construct a QPROP trainer for flat networks.  Uses a learning rate of 2.
 /// </summary>
 /// <param name="network">The network to train.</param>
 /// <param name="training">The training data.</param>
 public QuickPropagation(IContainsFlat network, IMLDataSet training) : this(network, training, 2.0)
 {
 }
Example #29
 public ITrain TrainNetwork(IContainsFlat network, IMLDataSet trainingSet)
 {
      return new Backpropagation(network, trainingSet, learningRate, momentum);
 }
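This TrainNetwork method is a small factory: callers depend only on the ITrain interface, so the propagation variant can be swapped without touching the training loop. A sketch of such a caller (the trainerFactory variable is hypothetical):

 ITrain train = trainerFactory.TrainNetwork(network, trainingSet); // trainerFactory: any object exposing TrainNetwork
 do
 {
     train.Iteration();
 } while (train.Error > 0.01);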
Example #30
 public ScaledConjugateGradient(IContainsFlat network, IMLDataSet training)
     : base(network, training)
 {
     TrainFlatNetworkSCG kscg = new TrainFlatNetworkSCG(network.Flat, this.Training);
     base.FlatTraining = kscg;
 }
        /// <summary>
        /// Construct a training class.
        /// </summary>
        ///
        /// <param name="network">The network to train.</param>
        /// <param name="training">The training data.</param>
        public ScaledConjugateGradient(IContainsFlat network,
                                       IMLDataSet training)
            : base(network, training)
        {
            _success = true;
            _delta = 0;
            _lambda2 = 0;
            _lambda = FirstLambda;
            _oldError = 0;
            _magP = 0;
            _restart = false;

            _weights = EngineArray.ArrayCopy(network.Flat.Weights);
            int numWeights = _weights.Length;

            _oldWeights = new double[numWeights];
            _oldGradient = new double[numWeights];

            _p = new double[numWeights];
            _r = new double[numWeights];

            _mustInit = true;
        }
 /// <summary>
 /// Construct a Manhattan propagation training object.
 /// </summary>
 ///
 /// <param name="network">The network to train.</param>
 /// <param name="training">The training data to use.</param>
 /// <param name="learnRate">The learning rate.</param>
 public ManhattanPropagation(IContainsFlat network,
                             IMLDataSet training, double learnRate) : base(network, training)
 {
     FlatTraining = new TrainFlatNetworkManhattan(network.Flat,
                                                  Training, learnRate);
 }
 /// <summary>
 /// Construct a Manhattan propagation training object.
 /// </summary>
 ///
 /// <param name="network">The network to train.</param>
 /// <param name="training">The training data to use.</param>
 /// <param name="learnRate">The learning rate.</param>
 public ManhattanPropagation(IContainsFlat network,
                             IMLDataSet training, double learnRate) : base(network, training)
 {
     FlatTraining = new TrainFlatNetworkManhattan(network.Flat,
                                                  Training, learnRate);
 }
Example #34
 /// <summary>
 /// Construct a QPROP trainer for flat networks.  Uses a learning rate of 2.
 /// </summary>
 /// <param name="network">The network to train.</param>
 /// <param name="training">The training data.</param>
 public QuickPropagation(IContainsFlat network, IMLDataSet training) : this(network, training, 2.0)
 {
 }
 /// <summary>
 /// Construct a Manhattan propagation training object.
 /// </summary>
 ///
 /// <param name="network">The network to train.</param>
 /// <param name="training">The training data to use.</param>
 /// <param name="learnRate">The learning rate.</param>
 public ManhattanPropagation(IContainsFlat network,
                             IMLDataSet training, double learnRate) : base(network, training)
 {
     _learningRate  = learnRate;
     _zeroTolerance = RPROPConst.DefaultZeroTolerance;
 }
 /// <summary>
 /// Construct a resilient training object, allowing the training parameters
 /// to be specified. The default parameters are usually acceptable for the
 /// resilient training algorithm, so you should normally use the other
 /// constructor, which makes use of the default values.
 /// </summary>
 ///
 /// <param name="network">The network to train.</param>
 /// <param name="training">The training set to use.</param>
 /// <param name="initialUpdate">The initial update value used for every weight.</param>
 /// <param name="maxStep">The maximum that a delta can reach.</param>
 public ResilientPropagation(IContainsFlat network,
                             IMLDataSet training, double initialUpdate,
                             double maxStep) : base(network, training)
 {
     var rpropFlat = new TrainFlatNetworkResilient(
         network.Flat, Training,
         RPROPConst.DefaultZeroTolerance, initialUpdate, maxStep);
     FlatTraining = rpropFlat;
 }
Example #37
 public ITrain TrainNetwork(IContainsFlat network, IMLDataSet trainingSet)
 {
      return new Encog.Neural.Networks.Training.Propagation.Resilient.ResilientPropagation(network, trainingSet);
 }
 /// <summary>
 /// Construct a propagation object.
 /// </summary>
 ///
 /// <param name="network">The network.</param>
 /// <param name="training">The training set.</param>
 protected Propagation(IContainsFlat network, IMLDataSet training) : base(TrainingImplementationType.Iterative)
 {
     _network = network;
     Training = training;
 }
Example #39
 /// <summary>
 /// Create a class to train using backpropagation. Use auto learn rate and
 /// momentum. Use the CPU to train.
 /// </summary>
 ///
 /// <param name="network">The network that is to be trained.</param>
 /// <param name="training">The training data to be used for backpropagation.</param>
 public Backpropagation(IContainsFlat network, IMLDataSet training) : this(network, training, 0, 0)
 {
     AddStrategy(new SmartLearningRate());
     AddStrategy(new SmartMomentum());
 }
Example #40
 public ResilientPropagation(IContainsFlat network, IMLDataSet training, double initialUpdate, double maxStep)
     : base(network, training)
 {
      TrainFlatNetworkResilient resilient = new TrainFlatNetworkResilient(network.Flat, this.Training, RPROPConst.DefaultZeroTolerance, initialUpdate, maxStep);
     base.FlatTraining = resilient;
 }
 /// <summary>
 /// Construct an RPROP trainer using the default training parameters.
 /// Usually this is the constructor to use, as the resilient training
 /// algorithm is designed for the default parameters to be acceptable
 /// for nearly all problems.
 /// </summary>
 ///
 /// <param name="network">The network to train.</param>
 /// <param name="training">The training data to use.</param>
 public ResilientPropagation(IContainsFlat network,
                             IMLDataSet training)
     : this(network, training, RPROPConst.DefaultInitialUpdate, RPROPConst.DefaultMaxStep)
 {
 }
 public StochasticGradientDescent(IContainsFlat network,
                                  IMLDataSet training) :
     this(network, training, new MersenneTwisterGenerateRandom())
 {
 }
        /// <summary>
        /// Construct a propagation object.
        /// </summary>
        ///
        /// <param name="network">The network.</param>
        /// <param name="training">The training set.</param>
        protected Propagation(IContainsFlat network, IMLDataSet training)
            : base(TrainingImplementationType.Iterative)
        {
            _network = network;
            _flat = network.Flat;
            _training = training;

            Gradients = new double[_flat.Weights.Length];
            _lastGradient = new double[_flat.Weights.Length];

            _indexable = training;
            _numThreads = 0;
            _reportedException = null;
            FixFlatSpot = true;
            ErrorFunction = new LinearErrorFunction();
        }
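One detail worth noting in this base constructor: _numThreads = 0 tells the flat trainer to choose a thread count automatically. Assuming the NumThreads property that Encog's Propagation exposes for this field, the count can also be pinned explicitly:

        var train = new ResilientPropagation(network, trainingSet);
        train.NumThreads = 1; // force single-threaded gradient calculation (0 = auto-detect)
        train.Iteration();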