Inheritance: TrainFlatNetworkProp
Example #1
        /// <summary>
        /// Program entry point.
        /// </summary>
        /// <param name="app">Holds arguments and other info.</param>
        public void Execute(IExampleInterface app)
        {
            // Create a 2-4-1 flat network (2 inputs, 4 hidden neurons, no second
            // hidden layer, 1 output) and randomize its starting weights.
            var network = new FlatNetwork(2, 4, 0, 1, false);
            network.Randomize();

            // Wrap the XOR truth table as an Encog data set.
            IMLDataSet trainingSet = new BasicMLDataSet(XORInput, XORIdeal);

            var train = new TrainFlatNetworkResilient(network, trainingSet);

            int epoch = 1;

            // Train until the error drops below 0.01.
            do
            {
                train.Iteration();
                Console.WriteLine(@"Epoch #" + epoch + @" Error:" + train.Error);
                epoch++;
            } while (train.Error > 0.01);

            var output = new double[1];
            // test the neural network
            Console.WriteLine(@"Neural Network Results:");
            foreach (IMLDataPair pair in trainingSet)
            {
                double[] input = pair.Input.Data;
                network.Compute(input, output);
                Console.WriteLine(input[0] + @"," + input[1] + @":" + output[0]);
            }
        }
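The example above references XORInput and XORIdeal without declaring them. A minimal sketch of how these fields are commonly defined for the XOR problem (the field names follow the example; the values are the standard XOR truth table):

        // Assumed training data for the example above: the standard XOR truth table.
        public static double[][] XORInput =
        {
            new[] {0.0, 0.0},
            new[] {1.0, 0.0},
            new[] {0.0, 1.0},
            new[] {1.0, 1.0}
        };

        public static double[][] XORIdeal =
        {
            new[] {0.0},
            new[] {1.0},
            new[] {1.0},
            new[] {0.0}
        };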
Example #2
 /// <summary>
 /// Construct a resilient training object, allowing the training parameters to
 /// be specified. Usually the default parameters are acceptable for the
 /// resilient training algorithm, so you should normally use the other
 /// constructor, which makes use of the default values.
 /// </summary>
 ///
 /// <param name="network">The network to train.</param>
 /// <param name="training">The training set to use.</param>
 /// <param name="initialUpdate"></param>
 /// <param name="maxStep">The maximum that a delta can reach.</param>
 public ResilientPropagation(IContainsFlat network,
                             IMLDataSet training, double initialUpdate,
                             double maxStep) : base(network, training)
 {
     // Delegate the actual RPROP training to the flat-network implementation.
     var rpropFlat = new TrainFlatNetworkResilient(
         network.Flat, Training,
         RPROPConst.DefaultZeroTolerance, initialUpdate, maxStep);
     FlatTraining = rpropFlat;
 }
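A brief usage sketch for this constructor, assuming BasicNetwork implements IContainsFlat (as in Encog's standard XOR samples) and reusing the XOR arrays from Example #1; the initial update of 0.1 and max step of 50 are illustrative values only:

 // Usage sketch (assumptions noted above): build a small BasicNetwork and
 // train it with explicit RPROP parameters.
 var network = new BasicNetwork();
 network.AddLayer(new BasicLayer(null, true, 2));
 network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 4));
 network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
 network.Structure.FinalizeStructure();
 network.Reset();

 IMLDataSet trainingSet = new BasicMLDataSet(XORInput, XORIdeal);

 // 0.1 and 50 are illustrative initialUpdate and maxStep values.
 var train = new ResilientPropagation(network, trainingSet, 0.1, 50);
 do
 {
     train.Iteration();
 } while (train.Error > 0.01);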
Example #3
        /// <summary>
        /// Program entry point.
        /// </summary>
        /// <param name="app">Holds arguments and other info.</param>
        public void Execute(IExampleInterface app)
        {
            IMLDataSet trainingSet = new BasicMLDataSet(XORInput, XORIdeal);

            FlatNetwork network = CreateNetwork();

            Console.WriteLine(@"Starting Weights:");
            DisplayWeights(network);
            Evaluate(network, trainingSet);

            TrainFlatNetworkResilient train = new TrainFlatNetworkResilient(
                    network, trainingSet);

            for (int iteration = 1; iteration <= ITERATIONS; iteration++)
            {
                train.Iteration();

                Console.WriteLine();
                Console.WriteLine(@"*** Iteration #" + iteration);
                Console.WriteLine(@"Error: " + train.Error);
                Evaluate(network, trainingSet);

                Console.WriteLine(@"LastGrad:"
                        + FormatArray(train.LastGradient));
                Console.WriteLine(@"Updates :"
                        + FormatArray(train.UpdateValues));

                DisplayWeights(network);
            }
        }
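Example #3 relies on an ITERATIONS constant and several helpers (CreateNetwork, Evaluate, DisplayWeights, FormatArray) that are not shown. A minimal sketch of what they might look like, assuming FlatNetwork exposes its weight array through the Weights property; the iteration count of 10 is arbitrary:

        // Hypothetical helpers matching the names used above; not part of Encog itself.
        private const int ITERATIONS = 10;   // arbitrary iteration count for the demo

        private static FlatNetwork CreateNetwork()
        {
            // Same 2-4-1 topology as Example #1, with randomized starting weights.
            var network = new FlatNetwork(2, 4, 0, 1, false);
            network.Randomize();
            return network;
        }

        private static string FormatArray(double[] values)
        {
            return string.Join(",", values);
        }

        private static void DisplayWeights(FlatNetwork network)
        {
            // Assumes FlatNetwork.Weights returns the flat weight array.
            Console.WriteLine(@"Weights: " + FormatArray(network.Weights));
        }

        private static void Evaluate(FlatNetwork network, IMLDataSet trainingSet)
        {
            var output = new double[1];
            foreach (IMLDataPair pair in trainingSet)
            {
                network.Compute(pair.Input.Data, output);
                Console.WriteLine(pair.Input[0] + @"," + pair.Input[1] + @":" + output[0]);
            }
        }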
Example #4
 public ResilientPropagation(IContainsFlat network, IMLDataSet training, double initialUpdate, double maxStep)
     : base(network, training)
 {
     // The literal 1E-17 is the zero tolerance: gradient values with a smaller
     // magnitude are treated as zero when the RPROP update checks the gradient sign.
     TrainFlatNetworkResilient resilient = new TrainFlatNetworkResilient(network.Flat, this.Training, 1E-17, initialUpdate, maxStep);
     base.FlatTraining = resilient;
 }
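This variant hard-codes 1E-17 for the zero tolerance rather than referencing RPROPConst. For comparison, a sketch of how a two-argument convenience overload could fall back to library defaults, assuming RPROPConst also defines DefaultInitialUpdate and DefaultMaxStep (only DefaultZeroTolerance appears in the examples above):

 // Sketch only: a convenience overload delegating to the full constructor above.
 // RPROPConst.DefaultInitialUpdate and RPROPConst.DefaultMaxStep are assumed names.
 public ResilientPropagation(IContainsFlat network, IMLDataSet training)
     : this(network, training,
            RPROPConst.DefaultInitialUpdate, RPROPConst.DefaultMaxStep)
 {
 }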