Example #1
 /// <summary>
 /// Construct a back propagation trainer.
 /// </summary>
 /// <param name="theNetwork">The network to train.</param>
 /// <param name="theTraining">The training data to use. The coefficient for how much of the gradient is applied to each weight.</param>
 /// <param name="theLearningRate">The learning rate. The coefficient for how much of the previous delta is applied to each weight.</param>
 /// <param name="theMomentum">The momentum.</param>
 public FreeformBackPropagation(FreeformNetwork theNetwork,
                                IMLDataSet theTraining, double theLearningRate,
                                double theMomentum)
     : base(theNetwork, theTraining)
 {
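      // Allocate temporary training values on the network: 1 per neuron, 2 per connection.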
     theNetwork.TempTrainingAllocate(1, 2);
     _learningRate = theLearningRate;
     _momentum     = theMomentum;
 }
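
A minimal usage sketch (not part of the original source): it assumes the standard Encog .NET types and namespaces shown in the using directives, including the FreeformNetwork(BasicNetwork) wrapping constructor; the XOR data, the 2-3-1 layer sizes, and the 0.7 learning rate / 0.3 momentum are arbitrary illustration values.

 using Encog.Engine.Network.Activation;
 using Encog.ML.Data.Basic;
 using Encog.Neural.Freeform;
 using Encog.Neural.Freeform.Training;
 using Encog.Neural.Networks;
 using Encog.Neural.Networks.Layers;

 // XOR truth table as training data (illustrative).
 double[][] xorInput = { new[] { 0.0, 0.0 }, new[] { 0.0, 1.0 }, new[] { 1.0, 0.0 }, new[] { 1.0, 1.0 } };
 double[][] xorIdeal = { new[] { 0.0 }, new[] { 1.0 }, new[] { 1.0 }, new[] { 0.0 } };
 var trainingSet = new BasicMLDataSet(xorInput, xorIdeal);

 // Build a small 2-3-1 feedforward network and wrap it as a freeform network.
 var basic = new BasicNetwork();
 basic.AddLayer(new BasicLayer(null, true, 2));
 basic.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
 basic.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
 basic.Structure.FinalizeStructure();
 basic.Reset();
 var network = new FreeformNetwork(basic);

 // Learning rate 0.7, momentum 0.3 (illustrative values).
 var train = new FreeformBackPropagation(network, trainingSet, 0.7, 0.3);
 do
 {
     train.Iteration();
 } while (train.Error > 0.01);
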
 /// <summary>
 ///     Construct the RPROP trainer.
 /// </summary>
 /// <param name="theNetwork">The network to train.</param>
 /// <param name="theTraining">The training set.</param>
 /// <param name="initialUpdate">The initial update.</param>
 /// <param name="theMaxStep">The max step.</param>
 public FreeformResilientPropagation(FreeformNetwork theNetwork,
                                     IMLDataSet theTraining, double initialUpdate,
                                     double theMaxStep)
     : base(theNetwork, theTraining)
 {
     _maxStep = theMaxStep;
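      // Allocate temporary training values on the network: 1 per neuron, 4 per connection.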
     theNetwork.TempTrainingAllocate(1, 4);
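      // Seed every connection's update value (the TempUpdate slot) with the initial update size.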
     theNetwork.PerformConnectionTask(c => c.SetTempTraining(TempUpdate,
                                                             initialUpdate));
 }
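
A matching sketch for the RPROP trainer (not part of the original source), reusing the network and trainingSet built in the previous example; the initial update of 0.1 and max step of 50 are illustrative values, not defaults mandated by this constructor.

 // Reuses 'network' (FreeformNetwork) and 'trainingSet' (IMLDataSet) from the sketch above.
 var rprop = new FreeformResilientPropagation(network, trainingSet, 0.1, 50.0);
 do
 {
     rprop.Iteration();
 } while (rprop.Error > 0.01);

 // Release the temporary training values allocated by the constructor.
 rprop.FinishTraining();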