/// <summary>
 /// Serialization-only constructor; do not call this directly.
 /// </summary>
 public FreeformPropagationTraining()
     : base(TrainingImplementationType.Iterative)
 {
     // "FixFlatSopt" is the library property's actual (misspelled) name — do not "fix" it here.
     FixFlatSopt = true;
     _network = null;
     _training = null;
 }
 /// <summary>
 /// Create a propagation trainer over the supplied network and data set.
 /// </summary>
 /// <param name="theNetwork">The network to train.</param>
 /// <param name="theTraining">The training data.</param>
 public FreeformPropagationTraining(FreeformNetwork theNetwork,
     IMLDataSet theTraining)
     : base(TrainingImplementationType.Iterative)
 {
     FixFlatSopt = true;
     _network = theNetwork;
     _training = theTraining;
 }
// ===== Example #3 ("예제" = "example"; scraped-sample separator, stray "0" was pagination residue) =====
 /// <summary>
 /// Construct a back propagation trainer.
 /// </summary>
 /// <param name="theNetwork">The network to train.</param>
 /// <param name="theTraining">The training data to use.</param>
 /// <param name="theLearningRate">The learning rate: the coefficient for how much of the gradient is applied to each weight.</param>
 /// <param name="theMomentum">The momentum: the coefficient for how much of the previous delta is applied to each weight.</param>
 public FreeformBackPropagation(FreeformNetwork theNetwork,
                                IMLDataSet theTraining, double theLearningRate,
                                double theMomentum)
     : base(theNetwork, theTraining)
 {
     // Reserve 2 temp training values per connection
     // (presumably gradient + previous delta — confirm against Encog docs).
     theNetwork.TempTrainingAllocate(1, 2);
     _learningRate = theLearningRate;
     _momentum     = theMomentum;
 }
 /// <summary>
 /// Create a back-propagation trainer for a freeform network.
 /// </summary>
 /// <param name="theNetwork">The network to train.</param>
 /// <param name="theTraining">The training data to use.</param>
 /// <param name="theLearningRate">The learning rate; scales how much of the gradient is applied to each weight.</param>
 /// <param name="theMomentum">The momentum; scales how much of the previous delta is applied to each weight.</param>
 public FreeformBackPropagation(FreeformNetwork theNetwork,
     IMLDataSet theTraining, double theLearningRate,
     double theMomentum)
     : base(theNetwork, theTraining)
 {
     _momentum = theMomentum;
     _learningRate = theLearningRate;
     // Reserve 2 temp training values per connection for trainer state.
     theNetwork.TempTrainingAllocate(1, 2);
 }
 /// <summary>
 /// Create an RPROP trainer with an explicit initial update and max step.
 /// </summary>
 /// <param name="theNetwork">The network to train.</param>
 /// <param name="theTraining">The training set.</param>
 /// <param name="initialUpdate">The initial update value seeded into every connection.</param>
 /// <param name="theMaxStep">The max step.</param>
 public FreeformResilientPropagation(FreeformNetwork theNetwork,
     IMLDataSet theTraining, double initialUpdate,
     double theMaxStep)
     : base(theNetwork, theTraining)
 {
     // Reserve 4 temp training values per connection for RPROP state.
     theNetwork.TempTrainingAllocate(1, 4);
     // Seed each connection's update value with the supplied initial update.
     theNetwork.PerformConnectionTask(
         connection => connection.SetTempTraining(TempUpdate, initialUpdate));
     _maxStep = theMaxStep;
 }
// ===== Example #6 ("예제" = "example"; scraped-sample separator, stray "0" was pagination residue) =====
        /// <summary>
        /// Build a trained freeform XOR network, verify it solves XOR within
        /// 10% error, and tag it with a test property.
        /// </summary>
        /// <returns>The trained, verified network.</returns>
        public FreeformNetwork Create()
        {
            var trained = XOR.CreateTrainedFreeformXOR();
            XOR.VerifyXOR(trained, 0.1);
            trained.SetProperty("test", "test2");
            return trained;
        }
        /// <summary>
        /// Train both an Elman (recurrent) and a feedforward freeform network on
        /// a temporal XOR sequence, then report the best error each achieved.
        /// </summary>
        /// <param name="app">The example interface (not used by this method).</param>
        public void Execute(IExampleInterface app)
        {
            TemporalXOR temp        = new TemporalXOR();
            IMLDataSet  trainingSet = temp.Generate(120);

            FreeformNetwork elmanNetwork       = FreeformNetwork.CreateElman(1, 6, 1, new ActivationSigmoid());
            FreeformNetwork feedforwardNetwork = FreeformNetwork.CreateFeedforward(1, 6, 0, 1, new ActivationSigmoid());

            double feedforwardError = TrainNetwork("feedforward", feedforwardNetwork, trainingSet);
            double elmanError       = TrainNetwork("elman", elmanNetwork, trainingSet);

            Console.WriteLine(@"Best error rate with Elman Network: " + elmanError);
            Console.WriteLine(@"Best error rate with Feedforward Network: "
                              + feedforwardError);
            // BUG FIX: the original used a verbatim string (@"..."), so "\n" was
            // printed literally instead of breaking the line. A regular string
            // honors the escapes. Also fixes the "Elment" typo in the message.
            Console.WriteLine("Elman should be able to get into the 10% range,\nfeedforward should not go below 25%.\nThe recurrent Elman net can learn better in this case.");
            Console.WriteLine(@"If your results are not as good, try rerunning, or perhaps training longer.");

            EncogFramework.Instance.Shutdown();
        }
        /// <summary>
        /// Train a freeform network with back propagation hybridized with
        /// simulated annealing (greedy strategy applied), stopping once
        /// EncogUtility reaches 1% error.
        /// </summary>
        /// <param name="what">A label for the network being trained (not used by this method).</param>
        /// <param name="network">The network to train.</param>
        /// <param name="trainingSet">The training data.</param>
        /// <returns>The final training error.</returns>
        public static double TrainNetwork(String what,
                                          FreeformNetwork network, IMLDataSet trainingSet)
        {
            ICalculateScore score = new TrainingSetScore(trainingSet);

            // Secondary trainer: simulated annealing, used via the hybrid strategy.
            IMLTrain trainAlt = new NeuralSimulatedAnnealing(network, score, 10, 2, 100);

            // Primary trainer: back propagation with a very small learning rate.
            IMLTrain trainMain = new FreeformBackPropagation(network, trainingSet, 0.00001, 0.0);

            var stop = new StopTrainingStrategy();
            trainMain.AddStrategy(new Greedy());
            trainMain.AddStrategy(new HybridStrategy(trainAlt));
            trainMain.AddStrategy(stop);

            EncogUtility.TrainToError(trainMain, 0.01);

            return trainMain.Error;
        }
// ===== Example #9 ("예제" = "example"; scraped-sample separator, stray "0" was pagination residue) =====
        /// <summary>
        /// Build a 2-3-1 freeform network by hand (no factory), train it on the
        /// XOR data to 1% error, and evaluate it.
        /// </summary>
        /// <param name="app">The example interface (not used by this method).</param>
        public void Execute(IExampleInterface app)
        {
            var network = new FreeformNetwork();
            IFreeformLayer input  = network.CreateInputLayer(2);
            IFreeformLayer hidden = network.CreateLayer(3);
            IFreeformLayer output = network.CreateOutputLayer(1);

            // Wire input -> hidden -> output with sigmoid activations.
            network.ConnectLayers(input, hidden, new ActivationSigmoid(), 1.0, false);
            network.ConnectLayers(hidden, output, new ActivationSigmoid(), 1.0, false);
            network.Reset();

            // Train on the XOR truth table and report how well it learned.
            IMLDataSet trainingSet = new BasicMLDataSet(XORInput, XORIdeal);
            EncogUtility.TrainToError(network, trainingSet, 0.01);
            EncogUtility.Evaluate(network, trainingSet);

            EncogFramework.Instance.Shutdown();
        }
        /// <summary>
        /// Train a conventional BasicNetwork on XOR, convert it to a
        /// FreeformNetwork, and evaluate the converted copy on the same data.
        /// </summary>
        /// <param name="app">The example interface (not used by this method).</param>
        public void Execute(IExampleInterface app)
        {
            // Build a 2-3-1 network by hand rather than via a factory.
            var network = new BasicNetwork();
            network.AddLayer(new BasicLayer(null, true, 2));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
            network.Structure.FinalizeStructure();
            network.Reset();

            IMLDataSet trainingSet = new BasicMLDataSet(XORInput, XORIdeal);
            EncogUtility.TrainToError(network, trainingSet, 0.01);
            EncogUtility.Evaluate(network, trainingSet);

            // Convert the trained network to freeform form and re-evaluate it.
            var freeform = new FreeformNetwork(network);
            EncogUtility.Evaluate(freeform, trainingSet);

            EncogFramework.Instance.Shutdown();
        }
 /// <summary>
 ///     Construct the RPROP trainer.
 /// </summary>
 /// <param name="theNetwork">The network to train.</param>
 /// <param name="theTraining">The training set.</param>
 /// <param name="initialUpdate">The initial update value seeded into every connection.</param>
 /// <param name="theMaxStep">The max step.</param>
 public FreeformResilientPropagation(FreeformNetwork theNetwork,
     IMLDataSet theTraining, double initialUpdate,
     double theMaxStep)
     : base(theNetwork, theTraining)
 {
     _maxStep = theMaxStep;
     // Reserve 4 temp training values per connection for RPROP state.
     theNetwork.TempTrainingAllocate(1, 4);
     // Seed each connection's update value with the supplied initial update.
     theNetwork.PerformConnectionTask(c => c.SetTempTraining(TempUpdate,
         initialUpdate));
 }
 /// <summary>
 /// Construct the RPROP trainer. Uses the default initial update and max step.
 /// </summary>
 /// <param name="theNetwork">The network to train.</param>
 /// <param name="theTraining">The training set.</param>
 public FreeformResilientPropagation(FreeformNetwork theNetwork,
     IMLDataSet theTraining)
     : this(theNetwork, theTraining, RPROPConst.DefaultInitialUpdate,
         RPROPConst.DefaultMaxStep)
 {
 }
 /// <summary>
 /// Create an RPROP trainer using the default initial update and max step
 /// from <see cref="RPROPConst"/>.
 /// </summary>
 /// <param name="theNetwork">The network to train.</param>
 /// <param name="theTraining">The training set.</param>
 public FreeformResilientPropagation(FreeformNetwork theNetwork, IMLDataSet theTraining)
     : this(theNetwork, theTraining,
            RPROPConst.DefaultInitialUpdate, RPROPConst.DefaultMaxStep)
 {
 }
        /// <summary>
        /// Create a propagation trainer for the given network and training data.
        /// </summary>
        /// <param name="theNetwork">The network to train.</param>
        /// <param name="theTraining">The training data.</param>
        public FreeformPropagationTraining(FreeformNetwork theNetwork,
            IMLDataSet theTraining)
            : base(TrainingImplementationType.Iterative)
        {
            FixFlatSopt = true;
            _network = theNetwork;
            _training = theTraining;
        }
// ===== Example #15 ("예제" = "example"; scraped-sample separator, stray "0" was pagination residue) =====
 /// <summary>
 /// Verify that the given network solves XOR to within 10% error.
 /// </summary>
 /// <param name="network">The trained freeform network to check.</param>
 public void Validate(FreeformNetwork network)
 {
     // Reset any recurrent context before evaluating.
     network.ClearContext();
     XOR.VerifyXOR(network, 0.1);
 }
 /// <summary>
 /// Serialization-only constructor; not intended for direct use.
 /// </summary>
 public FreeformPropagationTraining()
     : base(TrainingImplementationType.Iterative)
 {
     FixFlatSopt = true;
     _network = null;
     _training = null;
 }