        /// <summary>
        /// Construct a resilient propagation training object, allowing the
        /// training parameters to be specified. The default parameters are
        /// usually acceptable for the resilient training algorithm, so in most
        /// cases the other constructor, which uses the default values, should
        /// be preferred.
        /// </summary>
        ///
        /// <param name="network">The network to train.</param>
        /// <param name="training">The training set to use.</param>
        /// <param name="initialUpdate">The initial update value; all deltas are
        /// initially set to this amount.</param>
        /// <param name="maxStep">The maximum that a delta can reach.</param>
        public ResilientPropagation(IContainsFlat network,
                                    IMLDataSet training, double initialUpdate,
                                    double maxStep) : base(network, training)
        {
            var rpropFlat = new TrainFlatNetworkResilient(
                network.Flat, Training,
                RPROPConst.DefaultZeroTolerance, initialUpdate, maxStep);

            FlatTraining = rpropFlat;
        }

        /// <summary>
        /// Construct a resilient propagation training object, allowing the
        /// training parameters to be specified. The default parameters are
        /// usually acceptable for the resilient training algorithm, so in most
        /// cases the other constructor, which uses the default values, should
        /// be preferred.
        /// </summary>
        ///
        /// <param name="network">The network to train.</param>
        /// <param name="training">The training set to use.</param>
        /// <param name="profile">The optional OpenCL training profile to execute on,
        /// or null to train on the CPU.</param>
        /// <param name="initialUpdate">The initial update value; all deltas are
        /// initially set to this amount.</param>
        /// <param name="maxStep">The maximum that a delta can reach.</param>
        public ResilientPropagation(BasicNetwork network,
                                    INeuralDataSet training, OpenCLTrainingProfile profile,
                                    double initialUpdate, double maxStep)
            : base(network, training)
        {
            if (profile == null)
            {
                // No OpenCL profile supplied: train on the CPU. Pass the supplied
                // tuning parameters so they are not silently ignored.
                var rpropFlat = new TrainFlatNetworkResilient(
                    network.Structure.Flat, Training,
                    RPROPConst.DefaultZeroTolerance, initialUpdate, maxStep);
                FlatTraining = rpropFlat;
            }
#if !SILVERLIGHT
            else
            {
                // An OpenCL profile was supplied: train on the selected device.
                var rpropFlat = new TrainFlatNetworkOpenCL(
                    network.Structure.Flat, Training,
                    profile);
                rpropFlat.LearnRPROP(initialUpdate, maxStep);
                FlatTraining = rpropFlat;
            }
#endif
        }