/// <summary>
/// Construct a backpropagation trainer. If an OpenCL profile is supplied,
/// training is delegated to an OpenCL-based flat trainer; otherwise a
/// CPU-based flat backpropagation trainer is used.
/// </summary>
/// <param name="network">The network that is to be trained.</param>
/// <param name="training">The training set.</param>
/// <param name="profile">The OpenCL profile to use, or null to train on the CPU.</param>
/// <param name="learnRate">The rate at which the weight matrix will be adjusted based on
/// learning.</param>
/// <param name="momentum">The influence that previous iteration's training deltas will
/// have on the current iteration.</param>
public Backpropagation(BasicNetwork network, INeuralDataSet training, OpenCLTrainingProfile profile, double learnRate, double momentum) : base(network, training)
{
    if (profile == null)
    {
        // No profile: plain CPU flat-network backpropagation.
        TrainFlatNetworkBackPropagation backFlat = new TrainFlatNetworkBackPropagation(
            network.Structure.Flat,
            this.Training,
            learnRate,
            momentum);
        this.FlatTraining = backFlat;
    }
#if !SILVERLIGHT
    // NOTE(review): under SILVERLIGHT this else-branch is compiled out, so a
    // non-null profile leaves FlatTraining unassigned — confirm callers never
    // pass a profile on that platform.
    else
    {
        // OpenCL path: configure the OpenCL flat trainer for backpropagation.
        TrainFlatNetworkOpenCL rpropFlat = new TrainFlatNetworkOpenCL(
            network.Structure.Flat,
            this.Training,
            profile);
        rpropFlat.LearnBPROP(learnRate, momentum);
        this.FlatTraining = rpropFlat;
    }
#endif
}
/// <summary>
/// Construct a CPU-based backpropagation trainer for the given flat network.
/// </summary>
/// <param name="network">The network that is to be trained.</param>
/// <param name="training">The training set.</param>
/// <param name="learnRate">The rate at which the weight matrix is adjusted
/// based on learning.</param>
/// <param name="momentum">The influence that the previous iteration's training
/// deltas have on the current iteration.</param>
public Backpropagation(IContainsFlat network, IMLDataSet training, double learnRate, double momentum)
    : base(network, training)
{
    // Fail fast if the network and data set are incompatible.
    ValidateNetwork.ValidateMethodToData(network, training);

    FlatTraining = new TrainFlatNetworkBackPropagation(
        network.Flat, Training, learnRate, momentum);
}
/// <summary>
/// Pause the training, capturing the state needed to resume later.
/// </summary>
/// <returns>A training continuation object holding the last weight deltas,
/// keyed by <c>Backpropagation.LAST_DELTA</c>.</returns>
public override TrainingContinuation Pause()
{
    TrainingContinuation result = new TrainingContinuation();
    // The flat trainer holds the per-weight deltas from the last iteration;
    // store them so training can pick up where it left off.
    result[Backpropagation.LAST_DELTA] =
        ((TrainFlatNetworkBackPropagation)FlatTraining).LastDelta;
    return result;
}