Code Example #1
File: QuickPropagation.cs Project: neismit/emds
 public QuickPropagation(IContainsFlat network, IMLDataSet training, double learnRate)
     : base(network, training)
 {
     ValidateNetwork.ValidateMethodToData(network, training);
     TrainFlatNetworkQPROP kqprop = new TrainFlatNetworkQPROP(network.Flat, this.Training, learnRate);
     base.FlatTraining = kqprop;
 }
Code Example #2
 /// <summary>
 /// Construct a training class.
 /// </summary>
 ///
 /// <param name="network">The network to train.</param>
 /// <param name="training">The training data.</param>
 public ScaledConjugateGradient(IContainsFlat network,
                                IMLDataSet training) : base(network, training)
 {
     var scgFlat = new TrainFlatNetworkSCG(
         network.Flat, Training);
     FlatTraining = scgFlat;
 }
Code Example #3
 /// <summary>
 /// Construct a Manhattan propagation training object.
 /// </summary>
 ///
 /// <param name="network">The network to train.</param>
 /// <param name="training">The training data to use.</param>
 /// <param name="learnRate">The learning rate.</param>
 public ManhattanPropagation(IContainsFlat network,
                             IMLDataSet training, double learnRate)
     : base(network, training)
 {
     _learningRate = learnRate;
     _zeroTolerance = RPROPConst.DefaultZeroTolerance;
 }
Code Example #4
File: Backpropagation.cs Project: neismit/emds
 public Backpropagation(IContainsFlat network, IMLDataSet training, double learnRate, double momentum)
     : base(network, training)
 {
     ValidateNetwork.ValidateMethodToData(network, training);
     TrainFlatNetworkBackPropagation propagation = new TrainFlatNetworkBackPropagation(network.Flat, this.Training, learnRate, momentum);
     base.FlatTraining = propagation;
 }
Code Example #5
 /// <summary>
 /// Construct a QPROP trainer for flat networks.
 /// </summary>
 /// <param name="network">The network to train.</param>
 /// <param name="training">The training data.</param>
 /// <param name="learnRate">The learning rate.  2 is a good suggestion as
 ///            a learning rate to start with.  If it fails to converge,
 ///            then drop it.  Just like backprop, except QPROP can
 ///            take higher learning rates.</param>
 public QuickPropagation(IContainsFlat network,
                         IMLDataSet training, double learnRate) : base(network, training)
 {
     ValidateNetwork.ValidateMethodToData(network, training);
     LearningRate  = learnRate;
     LastDelta     = new double[Network.Flat.Weights.Length];
     OutputEpsilon = 1.0;
 }
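The learning rate of 2 suggested in the remarks above can be passed straight to this constructor. A minimal usage sketch, assuming an already built Encog BasicNetwork (which implements IContainsFlat) named network and an IMLDataSet named trainingSet; the 1% error target is illustrative:

 // Train with QPROP until the error drops below 1%.
 var train = new QuickPropagation(network, trainingSet, 2.0);
 do
 {
     train.Iteration();
 } while (train.Error > 0.01);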
Code Example #6
        /// <summary>
        /// Construct a QPROP trainer for flat networks.
        /// </summary>
        /// <param name="network">The network to train.</param>
        /// <param name="training">The training data.</param>
        /// <param name="learnRate">The learning rate.  2 is a good suggestion as 
        ///            a learning rate to start with.  If it fails to converge, 
        ///            then drop it.  Just like backprop, except QPROP can 
        ///            take higher learning rates.</param>
        public QuickPropagation(IContainsFlat network,
                                IMLDataSet training, double learnRate) : base(network, training)
        {
            ValidateNetwork.ValidateMethodToData(network, training);
            var qpropFlat = new TrainFlatNetworkQPROP(
                network.Flat, Training, learnRate);
            FlatTraining = qpropFlat;
        }
Code Example #7
        /// <summary>
        /// Construct a training class.
        /// </summary>
        ///
        /// <param name="network">The network to train.</param>
        /// <param name="training">The training data.</param>
        public ScaledConjugateGradient(IContainsFlat network,
                                       IMLDataSet training) : base(network, training)
        {
            var scgFlat = new TrainFlatNetworkSCG(
                network.Flat, Training);

            FlatTraining = scgFlat;
        }
Code Example #8
 /// <summary>
 /// Construct a QPROP trainer for flat networks.
 /// </summary>
 /// <param name="network">The network to train.</param>
 /// <param name="training">The training data.</param>
 /// <param name="learnRate">The learning rate.  2 is a good suggestion as 
 ///            a learning rate to start with.  If it fails to converge, 
 ///            then drop it.  Just like backprop, except QPROP can 
 ///            take higher learning rates.</param>
 public QuickPropagation(IContainsFlat network,
     IMLDataSet training, double learnRate)
     : base(network, training)
 {
     ValidateNetwork.ValidateMethodToData(network, training);
     LearningRate = learnRate;
     LastDelta = new double[Network.Flat.Weights.Length];
     OutputEpsilon = 1.0;
 }
Code Example #9
 /// <param name="network">The network that is to be trained</param>
 /// <param name="training">The training set</param>
 /// <param name="learnRate"></param>
 /// <param name="momentum"></param>
 public Backpropagation(IContainsFlat network,
                        IMLDataSet training, double learnRate,
                        double momentum) : base(network, training)
 {
     ValidateNetwork.ValidateMethodToData(network, training);
     var backFlat = new TrainFlatNetworkBackPropagation(
         network.Flat, Training, learnRate, momentum);
     FlatTraining = backFlat;
 }
Code Example #10
        /// <summary>
        /// Construct a QPROP trainer for flat networks.
        /// </summary>
        /// <param name="network">The network to train.</param>
        /// <param name="training">The training data.</param>
        /// <param name="learnRate">The learning rate.  2 is a good suggestion as
        ///            a learning rate to start with.  If it fails to converge,
        ///            then drop it.  Just like backprop, except QPROP can
        ///            take higher learning rates.</param>
        public QuickPropagation(IContainsFlat network,
                                IMLDataSet training, double learnRate) : base(network, training)
        {
            ValidateNetwork.ValidateMethodToData(network, training);
            var qpropFlat = new TrainFlatNetworkQPROP(
                network.Flat, Training, learnRate);

            FlatTraining = qpropFlat;
        }
Code Example #11
 /// <param name="network">The network that is to be trained</param>
 /// <param name="training">The training set</param>
 /// <param name="learnRate"></param>
 /// <param name="momentum"></param>
 public Backpropagation(IContainsFlat network,
                        IMLDataSet training, double learnRate,
                        double momentum) : base(network, training)
 {
     ValidateNetwork.ValidateMethodToData(network, training);
     _momentum     = momentum;
     _learningRate = learnRate;
     _lastDelta    = new double[Network.Flat.Weights.Length];
 }
Code Example #12
        /// <summary>
        /// Construct a resilient training object, allow the training parameters to
        /// be specified. Usually the default parameters are acceptable for the
        /// resilient training algorithm. Therefore you should usually use the other
        /// constructor, that makes use of the default values.
        /// </summary>
        ///
        /// <param name="network">The network to train.</param>
        /// <param name="training">The training set to use.</param>
        /// <param name="initialUpdate"></param>
        /// <param name="maxStep">The maximum that a delta can reach.</param>
        public ResilientPropagation(IContainsFlat network,
                                    IMLDataSet training, double initialUpdate,
                                    double maxStep) : base(network, training)
        {
            var rpropFlat = new TrainFlatNetworkResilient(
                network.Flat, Training,
                RPROPConst.DefaultZeroTolerance, initialUpdate, maxStep);

            FlatTraining = rpropFlat;
        }
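The two extra arguments above are only needed when the defaults do not converge: initialUpdate is the starting per-weight update value and maxStep caps how large any single weight change can grow. A sketch of both call styles, assuming network and trainingSet exist (example #24 below shows this codebase defaults to 0.1 and 50.0):

 // RPROP with library defaults, then with explicit parameters.
 var rpropDefault = new ResilientPropagation(network, trainingSet);
 var rpropCustom = new ResilientPropagation(network, trainingSet,
                                            0.05,  // initialUpdate: smaller first step
                                            25.0); // maxStep: tighter cap on weight deltas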
Code Example #13
        /// <param name="network">The network that is to be trained</param>
        /// <param name="training">The training set</param>
        /// <param name="learnRate"></param>
        /// <param name="momentum"></param>
        public Backpropagation(IContainsFlat network,
                               IMLDataSet training, double learnRate,
                               double momentum) : base(network, training)
        {
            ValidateNetwork.ValidateMethodToData(network, training);
            var backFlat = new TrainFlatNetworkBackPropagation(
                network.Flat, Training, learnRate, momentum);

            FlatTraining = backFlat;
        }
Code Example #14
        /// <summary>
        /// Construct a resilient training object, allow the training parameters to
        /// be specified. Usually the default parameters are acceptable for the
        /// resilient training algorithm. Therefore you should usually use the other
        /// constructor, that makes use of the default values.
        /// </summary>
        ///
        /// <param name="network">The network to train.</param>
        /// <param name="training">The training set to use.</param>
        /// <param name="initialUpdate"></param>
        /// <param name="maxStep">The maximum that a delta can reach.</param>
        public ResilientPropagation(IContainsFlat network,
                                    IMLDataSet training, double initialUpdate,
                                    double maxStep) : base(network, training)
        {
            _updateValues      = new double[network.Flat.Weights.Length];
            _zeroTolerance     = RPROPConst.DefaultZeroTolerance;
            _maxStep           = maxStep;
            _lastWeightChanged = new double[Network.Flat.Weights.Length];
            _lastDelta         = new double[Network.Flat.Weights.Length];

            for (int i = 0; i < _updateValues.Length; i++)
            {
                _updateValues[i] = initialUpdate;
            }
        }
Code Example #15
        /// <summary>
        /// Construct a propagation object.
        /// </summary>
        ///
        /// <param name="network">The network.</param>
        /// <param name="training">The training set.</param>
        protected Propagation(IContainsFlat network, IMLDataSet training) : base(TrainingImplementationType.Iterative)
        {
            _network  = network;
            _flat     = network.Flat;
            _training = training;

            Gradients     = new double[_flat.Weights.Length];
            _lastGradient = new double[_flat.Weights.Length];

            _indexable         = training;
            _numThreads        = 0;
            _reportedException = null;
            FixFlatSpot        = true;
            ErrorFunction      = new LinearErrorFunction();
        }
Code Example #16
        public StochasticGradientDescent(IContainsFlat network,
                                         IMLDataSet training, IGenerateRandom theRandom) :
            base(TrainingImplementationType.Iterative)
        {
            Training   = training;
            UpdateRule = new AdamUpdate();

            if (!(training is BatchDataSet))
            {
                BatchSize = 25;
            }

            _method           = network;
            _flat             = network.Flat;
            _layerDelta       = new double[_flat.LayerOutput.Length];
            _gradients        = new double[_flat.Weights.Length];
            _errorCalculation = new ErrorCalculation();
            _rnd         = theRandom;
            LearningRate = 0.001;
            Momentum     = 0.9;
        }
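This constructor wires up defaults directly visible in its body: an Adam update rule, a batch size of 25 when the data set is not already a BatchDataSet, a learning rate of 0.001, and a momentum of 0.9. A usage sketch, assuming network and trainingSet exist; example #42 below shows the two-argument overload that supplies the MersenneTwisterGenerateRandom for you:

 // SGD with the defaults set by the constructor above.
 var sgd = new StochasticGradientDescent(network, trainingSet,
                                         new MersenneTwisterGenerateRandom());
 sgd.Iteration();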
Code Example #17
        /// <summary>
        /// Validate a network for training.
        /// </summary>
        ///
        /// <param name="network">The network to validate.</param>
        /// <param name="training">The training set to validate.</param>
        public static void ValidateNetworkForTraining(IContainsFlat network,
                                                      IMLDataSet training)
        {
            int inputCount  = network.Flat.InputCount;
            int outputCount = network.Flat.OutputCount;

            if (inputCount != training.InputSize)
            {
                throw new NeuralNetworkError("The input layer size of "
                                             + inputCount + " must match the training input size of "
                                             + training.InputSize + ".");
            }

            if ((training.IdealSize > 0) &&
                (outputCount != training.IdealSize))
            {
                throw new NeuralNetworkError("The output layer size of "
                                             + outputCount + " must match the training input size of "
                                             + training.IdealSize + ".");
            }
        }
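Because a size mismatch surfaces as a NeuralNetworkError, callers can run this check before committing to a long training session. A sketch, assuming network and trainingSet exist; the enclosing class name EncogValidate is taken from the file name shown in example #20 below:

 // Fail fast if the data does not fit the network.
 try
 {
     EncogValidate.ValidateNetworkForTraining(network, trainingSet);
 }
 catch (NeuralNetworkError e)
 {
     Console.WriteLine("Cannot train: " + e.Message);
 }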
Code Example #18
        /// <summary>
        /// Validate a network for training.
        /// </summary>
        ///
        /// <param name="network">The network to validate.</param>
        /// <param name="training">The training set to validate.</param>
        public static void ValidateNetworkForTraining(IContainsFlat network,
                                                      IMLDataSet training)
        {
            int inputCount = network.Flat.InputCount;
            int outputCount = network.Flat.OutputCount;

            if (inputCount != training.InputSize)
            {
                throw new NeuralNetworkError("The input layer size of "
                                             + inputCount + " must match the training input size of "
                                             + training.InputSize + ".");
            }

            if ((training.IdealSize > 0)
                && (outputCount != training.IdealSize))
            {
                throw new NeuralNetworkError("The output layer size of "
                                             + outputCount + " must match the training input size of "
                                             + training.IdealSize + ".");
            }
        }
Code Example #19
        /// <summary>
        /// Construct a training class.
        /// </summary>
        ///
        /// <param name="network">The network to train.</param>
        /// <param name="training">The training data.</param>
        public ScaledConjugateGradient(IContainsFlat network,
                                       IMLDataSet training) : base(network, training)
        {
            _success = true;
            _delta    = 0;
            _lambda2  = 0;
            _lambda   = FirstLambda;
            _oldError = 0;
            _magP     = 0;
            _restart  = false;

            _weights = EngineArray.ArrayCopy(network.Flat.Weights);
            int numWeights = _weights.Length;

            _oldWeights  = new double[numWeights];
            _oldGradient = new double[numWeights];

            _p = new double[numWeights];
            _r = new double[numWeights];

            _mustInit = true;
        }
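SCG tunes its own step size, which is why this constructor takes nothing beyond the network and the data. A usage sketch, assuming network and trainingSet exist:

 // SCG needs no learning rate or momentum.
 var train = new ScaledConjugateGradient(network, trainingSet);
 do
 {
     train.Iteration();
 } while (train.Error > 0.01);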
Code Example #20
File: EncogValidate.cs Project: neismit/emds
 public static void ValidateNetworkForTraining(IContainsFlat network, IMLDataSet training)
 {
     int inputCount = network.Flat.InputCount;
     int outputCount = network.Flat.OutputCount;

     if (inputCount != training.InputSize)
     {
         throw new NeuralNetworkError("The input layer size of " + inputCount
                                      + " must match the training input size of "
                                      + training.InputSize + ".");
     }

     if ((training.IdealSize > 0) && (outputCount != training.IdealSize))
     {
         throw new NeuralNetworkError("The output layer size of " + outputCount
                                      + " must match the training ideal size of "
                                      + training.IdealSize + ".");
     }
 }
Code Example #21
 /// <param name="network">The network that is to be trained</param>
 /// <param name="training">The training set</param>
 /// <param name="learnRate"></param>
 /// <param name="momentum"></param>
 public Backpropagation(IContainsFlat network,
     IMLDataSet training, double learnRate,
     double momentum)
     : base(network, training)
 {
     ValidateNetwork.ValidateMethodToData(network, training);
     _momentum = momentum;
     _learningRate = learnRate;
     _lastDelta = new double[Network.Flat.Weights.Length];
 }
Code Example #22
 /// <summary>
 /// Create a class to train using backpropagation. Use auto learn rate and
 /// momentum. Use the CPU to train.
 /// </summary>
 ///
 /// <param name="network">The network that is to be trained.</param>
 /// <param name="training">The training data to be used for backpropagation.</param>
 public Backpropagation(IContainsFlat network, IMLDataSet training)
     : this(network, training, 0, 0)
 {
     AddStrategy(new SmartLearningRate());
     AddStrategy(new SmartMomentum());
 }
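The zero learning rate and momentum passed to the chained constructor are deliberate: the two strategies added in the body choose and adjust those values as training proceeds. A usage sketch, assuming network and trainingSet exist:

 // Let SmartLearningRate and SmartMomentum tune the parameters.
 var train = new Backpropagation(network, trainingSet);
 for (int epoch = 0; epoch < 100; epoch++)
 {
     train.Iteration();
 }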
Code Example #23
 /// <summary>
 /// Construct a propagation object.
 /// </summary>
 ///
 /// <param name="network">The network.</param>
 /// <param name="training">The training set.</param>
 protected Propagation(IContainsFlat network, IMLDataSet training) : base(TrainingImplementationType.Iterative)
 {
     _network = network;
     Training = training;
 }
Code Example #24
File: ResilientPropagation.cs Project: neismit/emds
 public ResilientPropagation(IContainsFlat network, IMLDataSet training)
     : this(network, training, 0.1, 50.0)
 {
 }
Code Example #25
File: ResilientPropagation.cs Project: neismit/emds
        /// <summary>
        /// Construct a resilient training object, allow the training parameters to
        /// be specified. Usually the default parameters are acceptable for the
        /// resilient training algorithm. Therefore you should usually use the other
        /// constructor, that makes use of the default values.
        /// </summary>
        ///
        /// <param name="network">The network to train.</param>
        /// <param name="training">The training set to use.</param>
        /// <param name="initialUpdate"></param>
        /// <param name="maxStep">The maximum that a delta can reach.</param>
        public ResilientPropagation(IContainsFlat network,
                                    IMLDataSet training, double initialUpdate,
                                    double maxStep)
            : base(network, training)
        {
            _updateValues = new double[network.Flat.Weights.Length];
            _zeroTolerance = RPROPConst.DefaultZeroTolerance;
            _maxStep = maxStep;
            _lastWeightChanged = new double[Network.Flat.Weights.Length];
            _lastDelta = new double[Network.Flat.Weights.Length];

            for (int i = 0; i < _updateValues.Length; i++)
            {
                _updateValues[i] = initialUpdate;
            }
        }
Code Example #26
File: Propagation.cs Project: neismit/emds
 protected Propagation(IContainsFlat network, IMLDataSet training)
     : base(TrainingImplementationType.Iterative)
 {
     this._network = network;
     this.Training = training;
 }
Code Example #27
 /// <summary>
 /// Construct an RPROP trainer, allows an OpenCL device to be specified. Use
 /// the defaults for all training parameters. Usually this is the constructor
 /// to use as the resilient training algorithm is designed for the default
 /// parameters to be acceptable for nearly all problems.
 /// </summary>
 ///
 /// <param name="network">The network to train.</param>
 /// <param name="training">The training data to use.</param>
 public ResilientPropagation(IContainsFlat network,
                             IMLDataSet training)
     : this(network, training, RPROPConst.DefaultInitialUpdate, RPROPConst.DefaultMaxStep)
 {
 }
Code Example #28
 /// <summary>
 /// Construct a QPROP trainer for flat networks.  Uses a learning rate of 2.
 /// </summary>
 /// <param name="network">The network to train.</param>
 /// <param name="training">The training data.</param>
 public QuickPropagation(IContainsFlat network, IMLDataSet training) : this(network, training, 2.0)
 {
 }
Code Example #29
File: BackPropagation.cs Project: jemml/jemml-dotnet
 public ITrain TrainNetwork(IContainsFlat network, IMLDataSet trainingSet)
 {
     return new Backpropagation(network, trainingSet, learningRate, momentum);
 }
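Returning the trainer behind the ITrain interface lets callers drive any propagation algorithm the same way. A sketch, assuming factory is an instance of the class above and that network and trainingSet exist:

 // Consume the trainer through the ITrain interface.
 ITrain train = factory.TrainNetwork(network, trainingSet);
 do
 {
     train.Iteration();
 } while (train.Error > 0.01);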
Code Example #30
 public ScaledConjugateGradient(IContainsFlat network, IMLDataSet training)
     : base(network, training)
 {
     TrainFlatNetworkSCG kscg = new TrainFlatNetworkSCG(network.Flat, this.Training);
     base.FlatTraining = kscg;
 }
Code Example #31
        /// <summary>
        /// Construct a training class.
        /// </summary>
        ///
        /// <param name="network">The network to train.</param>
        /// <param name="training">The training data.</param>
        public ScaledConjugateGradient(IContainsFlat network,
                                       IMLDataSet training)
            : base(network, training)
        {
            _success = true;
            _delta = 0;
            _lambda2 = 0;
            _lambda = FirstLambda;
            _oldError = 0;
            _magP = 0;
            _restart = false;

            _weights = EngineArray.ArrayCopy(network.Flat.Weights);
            int numWeights = _weights.Length;

            _oldWeights = new double[numWeights];
            _oldGradient = new double[numWeights];

            _p = new double[numWeights];
            _r = new double[numWeights];

            _mustInit = true;
        }
Code Example #32
 /// <summary>
 /// Construct a Manhattan propagation training object.
 /// </summary>
 ///
 /// <param name="network">The network to train.</param>
 /// <param name="training">The training data to use.</param>
 /// <param name="learnRate">The learning rate.</param>
 public ManhattanPropagation(IContainsFlat network,
                             IMLDataSet training, double learnRate) : base(network, training)
 {
     FlatTraining = new TrainFlatNetworkManhattan(network.Flat,
                                                  Training, learnRate);
 }
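Manhattan propagation moves each weight by a fixed step whose direction comes from the sign of the gradient, so the learning rate here is the step size itself and is usually kept very small. A usage sketch, assuming network and trainingSet exist; the 0.00001 step is a commonly suggested starting point, not a library constant:

 // Manhattan training takes fixed-size steps of learnRate.
 var train = new ManhattanPropagation(network, trainingSet, 0.00001);
 train.Iteration();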
Code Example #33
 /// <summary>
 /// Construct a Manhattan propagation training object.
 /// </summary>
 ///
 /// <param name="network">The network to train.</param>
 /// <param name="training">The training data to use.</param>
 /// <param name="learnRate">The learning rate.</param>
 public ManhattanPropagation(IContainsFlat network,
                             IMLDataSet training, double learnRate) : base(network, training)
 {
     FlatTraining = new TrainFlatNetworkManhattan(network.Flat,
                                                  Training, learnRate);
 }
Code Example #34
 /// <summary>
 /// Construct a QPROP trainer for flat networks.  Uses a learning rate of 2.
 /// </summary>
 /// <param name="network">The network to train.</param>
 /// <param name="training">The training data.</param>
 public QuickPropagation(IContainsFlat network, IMLDataSet training) : this(network, training, 2.0)
 {
 }
Code Example #35
 /// <summary>
 /// Construct a Manhattan propagation training object.
 /// </summary>
 ///
 /// <param name="network">The network to train.</param>
 /// <param name="training">The training data to use.</param>
 /// <param name="learnRate">The learning rate.</param>
 public ManhattanPropagation(IContainsFlat network,
                             IMLDataSet training, double learnRate) : base(network, training)
 {
     _learningRate  = learnRate;
     _zeroTolerance = RPROPConst.DefaultZeroTolerance;
 }
Code Example #36
 /// <summary>
 /// Construct a resilient training object, allow the training parameters to
 /// be specified. Usually the default parameters are acceptable for the
 /// resilient training algorithm. Therefore you should usually use the other
 /// constructor, that makes use of the default values.
 /// </summary>
 ///
 /// <param name="network">The network to train.</param>
 /// <param name="training">The training set to use.</param>
 /// <param name="initialUpdate"></param>
 /// <param name="maxStep">The maximum that a delta can reach.</param>
 public ResilientPropagation(IContainsFlat network,
                             IMLDataSet training, double initialUpdate,
                             double maxStep) : base(network, training)
 {
     var rpropFlat = new TrainFlatNetworkResilient(
         network.Flat, Training,
         RPROPConst.DefaultZeroTolerance, initialUpdate, maxStep);
     FlatTraining = rpropFlat;
 }
Code Example #37
 public ITrain TrainNetwork(IContainsFlat network, IMLDataSet trainingSet)
 {
     return new Encog.Neural.Networks.Training.Propagation.Resilient.ResilientPropagation(network, trainingSet);
 }
Code Example #38
 /// <summary>
 /// Construct a propagation object.
 /// </summary>
 ///
 /// <param name="network">The network.</param>
 /// <param name="training">The training set.</param>
 protected Propagation(IContainsFlat network, IMLDataSet training) : base(TrainingImplementationType.Iterative)
 {
     _network = network;
     Training = training;
 }
Code Example #39
 /// <summary>
 /// Create a class to train using backpropagation. Use auto learn rate and
 /// momentum. Use the CPU to train.
 /// </summary>
 ///
 /// <param name="network">The network that is to be trained.</param>
 /// <param name="training">The training data to be used for backpropagation.</param>
 public Backpropagation(IContainsFlat network, IMLDataSet training) : this(network, training, 0, 0)
 {
     AddStrategy(new SmartLearningRate());
     AddStrategy(new SmartMomentum());
 }
Code Example #40
File: ResilientPropagation.cs Project: neismit/emds
 public ResilientPropagation(IContainsFlat network, IMLDataSet training, double initialUpdate, double maxStep)
     : base(network, training)
 {
     TrainFlatNetworkResilient resilient = new TrainFlatNetworkResilient(network.Flat, this.Training, 1E-17, initialUpdate, maxStep);
     base.FlatTraining = resilient;
 }
Code Example #41
 /// <summary>
 /// Construct an RPROP trainer, allows an OpenCL device to be specified. Use
 /// the defaults for all training parameters. Usually this is the constructor
 /// to use as the resilient training algorithm is designed for the default
 /// parameters to be acceptable for nearly all problems.
 /// </summary>
 ///
 /// <param name="network">The network to train.</param>
 /// <param name="training">The training data to use.</param>
 public ResilientPropagation(IContainsFlat network,
                             IMLDataSet training)
     : this(network, training, RPROPConst.DefaultInitialUpdate, RPROPConst.DefaultMaxStep)
 {
 }
Code Example #42
 public StochasticGradientDescent(IContainsFlat network,
                                  IMLDataSet training) :
     this(network, training, new MersenneTwisterGenerateRandom())
 {
 }
Code Example #43
        /// <summary>
        /// Construct a propagation object.
        /// </summary>
        ///
        /// <param name="network">The network.</param>
        /// <param name="training">The training set.</param>
        protected Propagation(IContainsFlat network, IMLDataSet training)
            : base(TrainingImplementationType.Iterative)
        {
            _network = network;
            _flat = network.Flat;
            _training = training;

            Gradients = new double[_flat.Weights.Length];
            _lastGradient = new double[_flat.Weights.Length];

            _indexable = training;
            _numThreads = 0;
            _reportedException = null;
            FixFlatSpot = true;
            ErrorFunction = new LinearErrorFunction();
        }