/// <summary>
/// Construct a Manhattan propagation training object.
/// </summary>
/// <param name="network">The network to train.</param>
/// <param name="training">The training data to use.</param>
/// <param name="profile">The OpenCL profile to use, or null to train on the CPU.</param>
/// <param name="learnRate">The learning rate.</param>
public ManhattanPropagation(BasicNetwork network, INeuralDataSet training,
        OpenCLTrainingProfile profile, double learnRate)
    : base(network, training)
{
    if (profile == null)
    {
        // CPU path: use the flat Manhattan-update trainer directly.
        this.FlatTraining = new TrainFlatNetworkManhattan(
            network.Structure.Flat,
            this.Training,
            learnRate);
    }
#if !SILVERLIGHT
    else
    {
        // OpenCL path: create the generic OpenCL trainer, then
        // configure it for the Manhattan update rule.
        TrainFlatNetworkOpenCL rpropFlat = new TrainFlatNetworkOpenCL(
            network.Structure.Flat, this.Training, profile);
        rpropFlat.LearnManhattan(learnRate);
        this.FlatTraining = rpropFlat;
    }
#endif
}
/// <summary>
/// Construct a resilient training object, allowing the training parameters
/// to be specified explicitly. The defaults are usually acceptable for the
/// resilient training algorithm, so most callers should prefer the other
/// constructor, which supplies the default values.
/// </summary>
/// <param name="network">The network to train.</param>
/// <param name="training">The training set to use.</param>
/// <param name="profile">Optional EncogCL profile to execute on.</param>
/// <param name="initialUpdate">The initial update values; this is the amount
/// that all of the deltas are initially set to.</param>
/// <param name="maxStep">The maximum that a delta can reach.</param>
public ResilientPropagation(BasicNetwork network, INeuralDataSet training,
        OpenCLTrainingProfile profile, double initialUpdate, double maxStep)
    : base(network, training)
{
    if (profile == null)
    {
        // No OpenCL profile supplied: fall back to the CPU trainer.
        // NOTE(review): initialUpdate/maxStep are not forwarded to the CPU
        // trainer here — confirm TrainFlatNetworkResilient's defaults match.
        this.FlatTraining = new TrainFlatNetworkResilient(
            network.Structure.Flat, this.Training);
    }
#if !SILVERLIGHT
    else
    {
        // Delegate to the OpenCL trainer and configure it for RPROP.
        TrainFlatNetworkOpenCL openclTrainer = new TrainFlatNetworkOpenCL(
            network.Structure.Flat, this.Training, profile);
        openclTrainer.LearnRPROP(initialUpdate, maxStep);
        this.FlatTraining = openclTrainer;
    }
#endif
}
/// <summary>
/// Construct a backpropagation training object.
/// </summary>
/// <param name="network">The network that is to be trained.</param>
/// <param name="training">The training set to use.</param>
/// <param name="profile">The OpenCL profile to use, or null to train on the CPU.</param>
/// <param name="learnRate">The rate at which the weight matrix will be adjusted
/// based on learning.</param>
/// <param name="momentum">The influence that previous iteration's training
/// deltas will have on the current iteration.</param>
public Backpropagation(BasicNetwork network, INeuralDataSet training,
        OpenCLTrainingProfile profile, double learnRate, double momentum)
    : base(network, training)
{
    if (profile == null)
    {
        // CPU path: use the flat backpropagation trainer directly.
        TrainFlatNetworkBackPropagation backFlat = new TrainFlatNetworkBackPropagation(
            network.Structure.Flat, this.Training, learnRate, momentum);
        this.FlatTraining = backFlat;
    }
#if !SILVERLIGHT
    else
    {
        // OpenCL path: create the generic OpenCL trainer, then
        // configure it for standard backpropagation.
        TrainFlatNetworkOpenCL rpropFlat = new TrainFlatNetworkOpenCL(
            network.Structure.Flat, this.Training, profile);
        rpropFlat.LearnBPROP(learnRate, momentum);
        this.FlatTraining = rpropFlat;
    }
#endif
}