Example 1
 protected CrossTraining(IMLMethod network, FoldedDataSet training)
     : base(TrainingImplementationType.Iterative)
 {
     _network = network;
     Training = training;
     _folded = training;
 }
Example 2
        /// <summary>
        /// Create an annealing trainer.
        /// </summary>
        ///
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                               IMLDataSet training, String argsStr)
        {
            if (!(method is BasicNetwork))
            {
                throw new TrainingError(
                          "Invalid method type, requires BasicNetwork");
            }

            ICalculateScore score = new TrainingSetScore(training);

            IDictionary <String, String> args = ArchitectureParse.ParseParams(argsStr);
            var    holder    = new ParamsHolder(args);
            double startTemp = holder.GetDouble(
                MLTrainFactory.PropertyTemperatureStart, false, 10);
            double stopTemp = holder.GetDouble(
                MLTrainFactory.PropertyTemperatureStop, false, 2);

            int cycles = holder.GetInt(MLTrainFactory.Cycles, false, 100);

            IMLTrain train = new NeuralSimulatedAnnealing(
                (BasicNetwork)method, score, startTemp, stopTemp, cycles);

            return(train);
        }
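A minimal usage sketch for the factory method above, assuming an existing BasicNetwork `network` and IMLDataSet `trainingSet`. The `MLTrainFactory.TypeAnneal` constant and the `startTemp`/`stopTemp`/`cycles` key names are assumptions about the Encog factory strings, not taken from the listing:

        // Hypothetical: create and run the annealing trainer through the factory.
        var factory = new MLTrainFactory();
        IMLTrain anneal = factory.Create(network, trainingSet,
            MLTrainFactory.TypeAnneal, "startTemp=10,stopTemp=2,cycles=100");
        EncogUtility.TrainToError(anneal, 0.01);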
Example 3
        /// <summary>
        /// The Nguyen-Widrow initialization algorithm is as follows:
        /// 
        /// 1. Initialize all weights of the hidden layers with (ranged) random values.
        /// 2. For each hidden layer:
        /// 2.1 Calculate the beta value: 0.7 times the Nth root of the number of
        /// neurons in the current layer, where N is the number of neurons in the
        /// input layer.
        /// 2.2 For each synapse:
        /// 2.2.1 For each weight:
        /// 2.2.2 Adjust the weight by dividing by the norm of the weights for the
        /// neuron and multiplying by the beta value.
        /// </summary>
        /// <param name="method">The network to randomize.</param>
        public override sealed void Randomize(IMLMethod method)
        {
            if (!(method is BasicNetwork))
            {
                throw new EncogError("Ngyyen Widrow only works on BasicNetwork.");
            }

            var network = (BasicNetwork) method;

            new RangeRandomizer(Min, Max).Randomize(network);

            int hiddenNeurons = 0;

            for (int i = 1; i < network.LayerCount - 1; i++)
            {
                hiddenNeurons += network.GetLayerTotalNeuronCount(i);
            }

            // can't really do much, use regular randomization
            if (hiddenNeurons < 1)
            {
                return;
            }

            _inputCount = network.InputCount;
            _beta = 0.7d*Math.Pow(hiddenNeurons, 1.0d/network.InputCount);

            base.Randomize(network);
        }
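As a quick sanity check of the beta formula used above, a hypothetical network with 2 input neurons and 4 hidden neurons gives beta = 0.7 * 4^(1/2) = 1.4:

        // Worked check of the Nguyen-Widrow beta value (assumed 2 inputs, 4 hidden neurons).
        double beta = 0.7d * Math.Pow(4, 1.0d / 2); // 0.7 * sqrt(4) = 1.4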
Example 4
        /// <summary>
        /// Create a genetic algorithm trainer.
        /// </summary>
        ///
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                              IMLDataSet training, String argsStr)
        {
            if (!(method is BasicNetwork))
            {
                throw new TrainingError(
                    "Invalid method type, requires BasicNetwork");
            }

            ICalculateScore score = new TrainingSetScore(training);

            IDictionary<String, String> args = ArchitectureParse.ParseParams(argsStr);
            var holder = new ParamsHolder(args);
            int populationSize = holder.GetInt(
                MLTrainFactory.PropertyPopulationSize, false, 5000);
            double mutation = holder.GetDouble(
                MLTrainFactory.PropertyMutation, false, 0.1d);
            double mate = holder.GetDouble(MLTrainFactory.PropertyMate,
                                           false, 0.25d);

            IMLTrain train = new NeuralGeneticAlgorithm((BasicNetwork) method,
                                                       new RangeRandomizer(-1, 1), score, populationSize, mutation,
                                                       mate);

            return train;
        }
Example 5
        /// <summary>
        /// Perform the training option.
        /// </summary>
        public void Train()
        {
            // first, create the machine learning method
            var       methodFactory = new MLMethodFactory();
            IMLMethod method        = methodFactory.Create(Config.MethodType, Config.MethodArchitecture, Config.InputWindow, 1);

            // second, create the data set
            string     filename = FileUtil.CombinePath(new FileInfo(_path), Config.FilenameTrain).ToString();
            IMLDataSet dataSet  = EncogUtility.LoadEGB2Memory(new FileInfo(filename));

            // third, create the trainer
            var      trainFactory = new MLTrainFactory();
            IMLTrain train        = trainFactory.Create(method, dataSet, Config.TrainType, Config.TrainParams);

            // reset if improvement is less than 1% over 5 cycles
            if (method is IMLResettable && !(train is ManhattanPropagation))
            {
                train.AddStrategy(new RequiredImprovementStrategy(500));
            }

            // fourth, train and evaluate.
            EncogUtility.TrainToError(train, Config.TargetError);
            method = train.Method;
            EncogDirectoryPersistence.SaveObject(FileUtil.CombinePath(new FileInfo(_path), Config.MethodName), method);

            // finally, write out what we did
            Console.WriteLine(@"Machine Learning Type: " + Config.MethodType);
            Console.WriteLine(@"Machine Learning Architecture: " + Config.MethodArchitecture);

            Console.WriteLine(@"Training Method: " + Config.TrainType);
            Console.WriteLine(@"Training Args: " + Config.TrainParams);
        }
Example 6
        public double CalculateError()
        {
            IMLMethod  method = ObtainMethod();
            IMLDataSet data   = ObtainTrainingSet();

            return(((IMLError)method).CalculateError(data));
        }
Example 7
        /// <inheritdoc />
        public void CalculateScore(IGenome g)
        {
            // try rewrite
            Rules.Rewrite(g);

            // decode
            IMLMethod phenotype = CODEC.Decode(g);
            double    score;

            // deal with invalid decode
            if (phenotype == null)
            {
                score = BestComparer.ShouldMinimize ? Double.PositiveInfinity : Double.NegativeInfinity;
            }
            else
            {
                var context = phenotype as IMLContext;
                if (context != null)
                {
                    context.ClearContext();
                }
                score = ScoreFunction.CalculateScore(phenotype);
            }

            // now set the scores
            g.Score         = score;
            g.AdjustedScore = score;
        }
Example 8
 public IMLTrain Create(IMLMethod method, IMLDataSet training, string argsStr)
 {
     if (!(method is SupportVectorMachine))
     {
         throw new EncogError("SVM Train training cannot be used on a method of type: " + method.GetType().FullName);
     }

     // Read gamma and C from the argument string, falling back to the defaults.
     double defaultGamma = 1.0 / ((SupportVectorMachine) method).InputCount;
     double defaultC = 1.0;
     IDictionary<string, string> theParams = ArchitectureParse.ParseParams(argsStr);
     var holder = new ParamsHolder(theParams);
     double gamma = holder.GetDouble("GAMMA", false, defaultGamma);
     double c = holder.GetDouble("C", false, defaultC);

     var train = new SVMTrain((SupportVectorMachine) method, training) { Gamma = gamma };
     train.C = c;
     return train;
 }
Example 9
        /// <summary>
        /// Create an SVM trainer.
        /// </summary>
        ///
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                               IMLDataSet training, String argsStr)
        {
            if (!(method is SupportVectorMachine))
            {
                throw new EncogError(
                          "SVM Train training cannot be used on a method of type: "
                          + method.GetType().FullName);
            }

            double defaultGamma = 1.0d / ((SupportVectorMachine)method).InputCount;
            double defaultC     = 1.0d;

            IDictionary <String, String> args = ArchitectureParse.ParseParams(argsStr);
            var    holder = new ParamsHolder(args);
            double gamma  = holder.GetDouble(MLTrainFactory.PropertyGamma,
                                             false, defaultGamma);
            double c = holder.GetDouble(MLTrainFactory.PropertyC, false,
                                        defaultC);

            var result = new SVMTrain((SupportVectorMachine)method, training);

            result.Gamma = gamma;
            result.C     = c;
            return(result);
        }
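The `GAMMA` and `C` keys parsed above can also be supplied through the factory's argument string. A minimal sketch, assuming an existing SupportVectorMachine `svm` and data set `trainingSet`; the `TypeSVM` constant name is an assumption:

        // Hypothetical: the same trainer built via MLTrainFactory with explicit parameters.
        var factory = new MLTrainFactory();
        IMLTrain svmTrain = factory.Create(svm, trainingSet,
            MLTrainFactory.TypeSVM, "GAMMA=0.5,C=1.0");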
Example 10
        public void Process(String methodName, String methodArchitecture, String trainerName, String trainerArgs,
                            int outputNeurons)
        {
            // first, create the machine learning method
            var       methodFactory = new MLMethodFactory();
            IMLMethod method        = methodFactory.Create(methodName, methodArchitecture, 2, outputNeurons);

            // second, create the data set
            IMLDataSet dataSet = new BasicMLDataSet(XORInput, XORIdeal);

            // third, create the trainer
            var      trainFactory = new MLTrainFactory();
            IMLTrain train        = trainFactory.Create(method, dataSet, trainerName, trainerArgs);

            // reset if improvement is less than 1% over 5 cycles
            if (method is IMLResettable && !(train is ManhattanPropagation))
            {
                train.AddStrategy(new RequiredImprovementStrategy(500));
            }

            // fourth, train and evaluate.
            EncogUtility.TrainToError(train, 0.01);
            method = train.Method;
            EncogUtility.Evaluate((IMLRegression)method, dataSet);

            // finally, write out what we did
            Console.WriteLine(@"Machine Learning Type: " + methodName);
            Console.WriteLine(@"Machine Learning Architecture: " + methodArchitecture);

            Console.WriteLine(@"Training Method: " + trainerName);
            Console.WriteLine(@"Training Args: " + trainerArgs);
        }
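A hypothetical invocation of this method for the XOR data it builds internally. The "feedforward" and "rprop" type names and the architecture grammar follow Encog's factory conventions, but treat them as assumptions here:

        // Hypothetical: a sigmoid feedforward network with one hidden layer of
        // four neurons, trained by resilient propagation with default arguments.
        Process("feedforward", "?:B->SIGMOID->4:B->SIGMOID->?", "rprop", "", 1);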
Example 11
 /// <summary>
 /// Construct a cross trainer.
 /// </summary>
 ///
 /// <param name="network">The network.</param>
 /// <param name="training">The training data.</param>
 protected CrossTraining(IMLMethod network, FoldedDataSet training)
     : base(TrainingImplementationType.Iterative)
 {
     _network = network;
     Training = training;
     _folded = training;
 }
Example 12
        public double CalculateScore(IMLMethod network)
        {
            EncogLogisticsPilot pilot = new EncogLogisticsPilot((BasicNetwork)network, Metadata);
            var logisticOutput        = pilot.ScorePilot(CustomerOrders);

            return(logisticOutput.Score);
        }
Example 13
        /// <summary>
        /// Create a genetic algorithm trainer.
        /// </summary>
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                               IMLDataSet training, String argsStr)
        {
            if (!(method is BasicNetwork))
            {
                throw new TrainingError(
                          "Invalid method type, requires BasicNetwork");
            }

            ICalculateScore score = new TrainingSetScore(training);

            IDictionary <String, String> args = ArchitectureParse.ParseParams(argsStr);
            var holder         = new ParamsHolder(args);
            int populationSize = holder.GetInt(
                MLTrainFactory.PropertyPopulationSize, false, 5000);

            IMLTrain train = new MLMethodGeneticAlgorithm(() =>
            {
                IMLMethod result = (IMLMethod)ObjectCloner.DeepCopy(method);
                ((IMLResettable)result).Reset();
                return(result);
            }, score, populationSize);

            return(train);
        }
Example 14
        /// <summary>
        ///     Crossvalidate and fit.
        /// </summary>
        /// <param name="k">The number of folds.</param>
        /// <param name="shuffle">True if we should shuffle.</param>
        /// <returns>The trained method.</returns>
        public IMLMethod Crossvalidate(int k, bool shuffle)
        {
            var cross = new KFoldCrossvalidation(
                TrainingDataset, k);

            cross.Process(shuffle);

            int foldNumber = 0;

            foreach (DataFold fold in cross.Folds)
            {
                foldNumber++;
                Report.Report(k, foldNumber, "Fold #" + foldNumber);
                FitFold(k, foldNumber, fold);
            }

            double    sum        = 0;
            double    bestScore  = Double.PositiveInfinity;
            IMLMethod bestMethod = null;

            foreach (DataFold fold in cross.Folds)
            {
                sum += fold.Score;
                if (fold.Score < bestScore)
                {
                    bestScore  = fold.Score;
                    bestMethod = fold.Method;
                }
            }
            sum = sum / cross.Folds.Count;
            Report.Report(k, k, "Cross-validated score: " + sum);
            return(bestMethod);
        }
Example 15
        /// <summary>
        /// Create a genetic algorithm trainer.
        /// </summary>
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                              IMLDataSet training, String argsStr)
        {
            if (!(method is BasicNetwork))
            {
                throw new TrainingError(
                    "Invalid method type, requires BasicNetwork");
            }

            ICalculateScore score = new TrainingSetScore(training);

            IDictionary<String, String> args = ArchitectureParse.ParseParams(argsStr);
            var holder = new ParamsHolder(args);
            int populationSize = holder.GetInt(
                MLTrainFactory.PropertyPopulationSize, false, 5000);

            IMLTrain train = new MLMethodGeneticAlgorithm(() =>
            {
                IMLMethod result = (IMLMethod) ObjectCloner.DeepCopy(method);
                ((IMLResettable)result).Reset();
                return result;
            }, score, populationSize);

            return train;
        }
Example 16
        /// <summary>
        /// Create an SVM trainer.
        /// </summary>
        ///
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                              IMLDataSet training, String argsStr)
        {
            if (!(method is SupportVectorMachine))
            {
                throw new EncogError(
                    "SVM Train training cannot be used on a method of type: "
                    + method.GetType().FullName);
            }

            double defaultGamma = 1.0d/((SupportVectorMachine) method).InputCount;
            double defaultC = 1.0d;

            IDictionary<String, String> args = ArchitectureParse.ParseParams(argsStr);
            var holder = new ParamsHolder(args);
            double gamma = holder.GetDouble(MLTrainFactory.PropertyGamma,
                                            false, defaultGamma);
            double c = holder.GetDouble(MLTrainFactory.PropertyC, false,
                                        defaultC);

            var result = new SVMTrain((SupportVectorMachine) method, training);
            result.Gamma = gamma;
            result.C = c;
            return result;
        }
Example 17
 public double CalculateScore(IMLMethod network)
 {
     var pilot = new NeuralRobot((BasicNetwork)network, false, RobotContol.SourceLocation, RobotContol.DestLocation);
     int score = pilot.ScorePilot();
     //RobotContol.Scores.Add(score);
     return score;
 }
Example 18
        /// <summary>
        ///     Perform the training.
        /// </summary>
        /// <param name="train">The training method.</param>
        /// <param name="method">The ML method.</param>
        /// <param name="trainingSet">The training set.</param>
        private void PerformTraining(IMLTrain train, IMLMethod method,
                                     IMLDataSet trainingSet)
        {
            ValidateNetwork.ValidateMethodToData(method, trainingSet);
            double targetError = Prop.GetPropertyDouble(
                ScriptProperties.MlTrainTargetError);

            Analyst.ReportTrainingBegin();
            int maxIteration = Analyst.MaxIteration;

            if (train.ImplementationType == TrainingImplementationType.OnePass)
            {
                train.Iteration();
                Analyst.ReportTraining(train);
            }
            else
            {
                do
                {
                    train.Iteration();
                    Analyst.ReportTraining(train);
                } while ((train.Error > targetError) &&
                         !Analyst.ShouldStopCommand() &&
                         !train.TrainingDone &&
                         ((maxIteration == -1) || (train.IterationNumber < maxIteration)));
            }
            train.FinishTraining();

            Analyst.ReportTrainingEnd();
        }
Example 19
        /// <summary>
        /// The Nguyen-Widrow initialization algorithm is as follows:
        ///
        /// 1. Initialize all weights of the hidden layers with (ranged) random values.
        /// 2. For each hidden layer:
        /// 2.1 Calculate the beta value: 0.7 times the Nth root of the number of
        /// neurons in the current layer, where N is the number of neurons in the
        /// input layer.
        /// 2.2 For each synapse:
        /// 2.2.1 For each weight:
        /// 2.2.2 Adjust the weight by dividing by the norm of the weights for the
        /// neuron and multiplying by the beta value.
        /// </summary>
        /// <param name="method">The network to randomize.</param>
        public override sealed void Randomize(IMLMethod method)
        {
            if (!(method is BasicNetwork))
            {
                throw new EncogError("Ngyyen Widrow only works on BasicNetwork.");
            }

            var network = (BasicNetwork)method;

            new RangeRandomizer(Min, Max).Randomize(network);

            int hiddenNeurons = 0;

            for (int i = 1; i < network.LayerCount - 1; i++)
            {
                hiddenNeurons += network.GetLayerTotalNeuronCount(i);
            }

            // can't really do much, use regular randomization
            if (hiddenNeurons < 1)
            {
                return;
            }

            _inputCount = network.InputCount;
            _beta       = 0.7d * Math.Pow(hiddenNeurons, 1.0d / network.InputCount);

            base.Randomize(network);
        }
Example 20
 public override sealed void Randomize(IMLMethod method)
 {
     if (!(method is BasicNetwork))
     {
         throw new EncogError("Nguyen-Widrow only works on BasicNetwork.");
     }

     var network = (BasicNetwork) method;

     new RangeRandomizer(base.Min, base.Max).Randomize(network);

     // Count the neurons in the hidden layers.
     int hiddenNeurons = 0;
     for (int i = 1; i < network.LayerCount - 1; i++)
     {
         hiddenNeurons += network.GetLayerTotalNeuronCount(i);
     }

     // With no hidden neurons, the plain range randomization above is all we can do.
     if (hiddenNeurons < 1)
     {
         return;
     }

     _inputCount = network.InputCount;
     _beta = 0.7 * Math.Pow(hiddenNeurons, 1.0 / network.InputCount);

     base.Randomize(network);
 }
Example 21
        /// <summary>
        /// The constructor.
        /// </summary>
        /// <param name="theAlgorithm">The algorithm to fit.</param>
        /// <param name="theScore">The score function.</param>
        /// <param name="thePopulationSize">The population size.</param>
        public ContinuousACO(IMLMethod theAlgorithm, IScoreFunction theScore, int thePopulationSize)
        {
            Epsilon = .75;

            _algorithm      = theAlgorithm;
            _populationSize = thePopulationSize;
            _score          = theScore;
            Random          = new MersenneTwisterGenerateRandom();
            _paramCount     = theAlgorithm.LongTermMemory.Length;

            _population = new ContinuousAnt[thePopulationSize * 2];
            _weighting  = new double[thePopulationSize];
            for (int i = 0; i < _population.Length; i++)
            {
                _population[i] = new ContinuousAnt(_paramCount, _score.ShouldMinimize);
                for (int j = 0; j < _paramCount; j++)
                {
                    _population[i].Params[j] = Random.NextDouble(-1, 1);
                }
            }

            UpdateScore();
            Array.Sort(_population);
            ComputeWeighting();
            SampleSolutions();
            Array.Sort(_population);
        }
Example 22
 public IGenome Encode(IMLMethod phenotype)
 {
     var rbfNet = (RBFNetwork) phenotype;
     var result = new DoubleArrayGenome(Size);
     Array.Copy(rbfNet.LongTermMemory, 0, result.Data, 0, _size);
     return result;
 }
Example 23
        /// <summary>
        /// Create a genetic algorithm trainer.
        /// </summary>
        ///
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                               IMLDataSet training, String argsStr)
        {
            if (!(method is BasicNetwork))
            {
                throw new TrainingError(
                          "Invalid method type, requires BasicNetwork");
            }

            ICalculateScore score = new TrainingSetScore(training);

            IDictionary <String, String> args = ArchitectureParse.ParseParams(argsStr);
            var holder         = new ParamsHolder(args);
            int populationSize = holder.GetInt(
                MLTrainFactory.PropertyPopulationSize, false, 5000);
            double mutation = holder.GetDouble(
                MLTrainFactory.PropertyMutation, false, 0.1d);
            double mate = holder.GetDouble(MLTrainFactory.PropertyMate,
                                           false, 0.25d);

            IMLTrain train = new NeuralGeneticAlgorithm((BasicNetwork)method,
                                                        new RangeRandomizer(-1, 1), score, populationSize, mutation,
                                                        mate);

            return(train);
        }
Example 24
        /// <summary>
        /// Create an annealing trainer.
        /// </summary>
        ///
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                              IMLDataSet training, String argsStr)
        {
            if (!(method is BasicNetwork))
            {
                throw new TrainingError(
                    "Invalid method type, requires BasicNetwork");
            }

            ICalculateScore score = new TrainingSetScore(training);

            IDictionary<String, String> args = ArchitectureParse.ParseParams(argsStr);
            var holder = new ParamsHolder(args);
            double startTemp = holder.GetDouble(
                MLTrainFactory.PropertyTemperatureStart, false, 10);
            double stopTemp = holder.GetDouble(
                MLTrainFactory.PropertyTemperatureStop, false, 2);

            int cycles = holder.GetInt(MLTrainFactory.Cycles, false, 100);

            IMLTrain train = new NeuralSimulatedAnnealing(
                (BasicNetwork) method, score, startTemp, stopTemp, cycles);

            return train;
        }
Example 25
        /// <summary>
        /// Create a NEAT GA trainer.
        /// </summary>
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                               IMLDataSet training, String argsStr)
        {
            ICalculateScore score = new TrainingSetScore(training);
            TrainEA         train = NEATUtil.ConstructNEATTrainer((NEATPopulation)method, score);

            return(train);
        }
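For this factory the `method` argument must already be a NEATPopulation. A minimal preparation sketch; the NEATPopulation constructor arguments (input count, output count, population size) and the `Reset()` call are assumptions from Encog's NEAT API:

        // Hypothetical: build and seed a NEAT population, then construct the trainer.
        var population = new NEATPopulation(2, 1, 1000);
        population.Reset(); // generate the initial random genomes
        ICalculateScore score = new TrainingSetScore(trainingSet);
        TrainEA neat = NEATUtil.ConstructNEATTrainer(population, score);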
Example 26
        public IGenome Encode(IMLMethod phenotype)
        {
            var rbfNet = (RBFNetwork)phenotype;
            var result = new DoubleArrayGenome(Size);

            Array.Copy(rbfNet.LongTermMemory, 0, result.Data, 0, _size);
            return(result);
        }
Example 27
 /// <inheritdoc />
 public double CalculateScore(IMLMethod genome)
 {
     var prg = (EncogProgram) genome;
     var pop = (PrgPopulation) prg.Population;
     IMLData inputData = new BasicMLData(pop.Context.DefinedVariables.Count);
     prg.Compute(inputData);
     return 0;
 }
Example 28
 public IMLTrain Create(IMLMethod method, IMLDataSet training, string args)
 {
     if (!(method is BasicNetwork))
     {
         throw new EncogError("SCG training cannot be used on a method of type: " + method.GetType().FullName);
     }
     return new ScaledConjugateGradient((BasicNetwork) method, training);
 }
Example 29
 public override double CalculateScore(IMLMethod network)
 {
     gameManager.Player2 = new NeuralColorPlayer(gameManager.Rules as VanDerWaerdenGameRules) { Network = network as BasicNetwork };
     var scores = new int[NGames];
     for (int i = 0; i < NGames; i++)
         scores[i] = gameManager.PlayGame();
     return CalculateScore(scores);
 }
Example 30
        /// <summary>
        /// Create a NEAT GA trainer.
        /// </summary>
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                IMLDataSet training, String argsStr)
        {
            ICalculateScore score = new TrainingSetScore(training);
            TrainEA train = NEATUtil.ConstructNEATTrainer((NEATPopulation)method, score);

            return train;
        }
Example 31
 public static int NetworkSize(IMLMethod network)
 {
     if (!(network is IMLEncodable))
     {
         throw new NeuralNetworkError("This machine learning method cannot be encoded:" + network.GetType().FullName);
     }
     return ((IMLEncodable) network).EncodedArrayLength();
 }
Example 32
 public static void ArrayToNetwork(double[] array, IMLMethod network)
 {
     if (!(network is IMLEncodable))
     {
         throw new NeuralNetworkError("This machine learning method cannot be encoded:" + network.GetType().FullName);
     }
     ((IMLEncodable) network).DecodeFromArray(array);
 }
Example 33
 public IMLTrain Create(IMLMethod method, IMLDataSet training, string argsStr)
 {
     if (!(method is SOMNetwork))
     {
         throw new EncogError("Cluster SOM training cannot be used on a method of type: " + method.GetType().FullName);
     }
     return new SOMClusterCopyTraining((SOMNetwork) method, training);
 }
Example 34
 public IMLTrain Create(IMLMethod method, IMLDataSet training, string args)
 {
     if (!(method is RBFNetwork))
     {
         throw new EncogError("RBF-SVD training cannot be used on a method of type: " + method.GetType().FullName);
     }
     return new SVDTraining((RBFNetwork) method, training);
 }
Example 35
 public IMLTrain Create(IMLMethod method, IMLDataSet training, string args)
 {
     if (!(method is BasicPNN))
     {
         throw new EncogError("PNN training cannot be used on a method of type: " + method.GetType().FullName);
     }
     return new TrainBasicPNN((BasicPNN) method, training);
 }
Example 36
        /// <summary>
        /// Returns the fitness of the power-unit maintenance schedule represented by the chromosome.
        /// </summary>
        /// <param name="phenotype">The genome to evaluate.</param>
        /// <returns>The chromosome's fitness.</returns>
        public double CalculateScore(IMLMethod phenotype)
        {
            try
            {
                FourBitCustomGenome genome     = (FourBitCustomGenome)phenotype;
                FourBitGene []      genomeData = ((FourBitCustomGenome)genome).Data;
                //double maxPossiblePower = PowerUnits.Sum(x => x.UnitCapacity);

                new PowerUnitGALogic().DisplayGeneAsString(genome, genomeData);

                var intervalFitnessDataRepository = new IntervalFitnessDataRepository(MaxPossiblePower);
                var intervalRawData = intervalFitnessDataRepository.IntervalRawData;

                for (int i = 0; i < NumberOfIntervals; i++)
                {
                    IntervalsFitnessData interval = intervalRawData[i];
                    //interval.MaxReserve = maxPossiblePower;
                    for (int j = 0; j < genomeData.Length; j++)
                    {
                        PowerUnit   powerUnit             = PowerUnits[j];
                        FourBitGene fourBitGene           = genomeData[j];
                        int         geneBitIndex          = i;
                        var         isPowerUnitMaintained = fourBitGene.Gene[geneBitIndex] == 1;
                        if (isPowerUnitMaintained)
                        {
                            interval.ReducedAmountOnMaintainance = interval.ReducedAmountOnMaintainance + (1 * powerUnit.UnitCapacity);
                        }
                        else
                        {
                            interval.ReducedAmountOnMaintainance = interval.ReducedAmountOnMaintainance + (0 * powerUnit.UnitCapacity);
                        }
                    }

                    var totalPowerReductionOnMaintanceAndUsage =
                        interval.PowerRequirement + interval.ReducedAmountOnMaintainance;
                    interval.ReserveAfterMaintainance = interval.MaxReserve - totalPowerReductionOnMaintanceAndUsage;
                    //if (interval.ReserveAfterMaintainance < 0.0)
                    //{
                //    // the chromosome is not suitable for our requirement
                    //    chromosomeFitness = 0.0;
                    //}
                }

                var reserveAfterMaintainanceMin = intervalRawData.Min(x => x.ReserveAfterMaintainance);
                // the minimal reserve after maintenance and usage provides the chromosome's fitness
                //var chromosomeFitness = reserveAfterMaintainanceMin > 0.0 ? reserveAfterMaintainanceMin : 0.0;
                var chromosomeFitness = reserveAfterMaintainanceMin;
                Console.WriteLine("\tFitness = {0} - of {1}, {2}, {3}, {4}", chromosomeFitness,
                                  intervalRawData[0].ReserveAfterMaintainance, intervalRawData[1].ReserveAfterMaintainance,
                                  intervalRawData[2].ReserveAfterMaintainance, intervalRawData[3].ReserveAfterMaintainance);
                return(chromosomeFitness);
            }
            catch (Exception e)
            {
                Console.WriteLine(e);
                throw;
            }
        }
Example 37
 /// <summary>
 /// Determine the network size.
 /// </summary>
 ///
 /// <param name="network">The network.</param>
 /// <returns>The size.</returns>
 public static int NetworkSize(IMLMethod network)
 {
     if (network is IMLEncodable)
     {
         return(((IMLEncodable)network).EncodedArrayLength());
     }
     throw new NeuralNetworkError(Error
                                  + network.GetType().FullName);
 }
Example 38
        /// <inheritdoc />
        public double CalculateScore(IMLMethod genome)
        {
            var     prg       = (EncogProgram)genome;
            var     pop       = (PrgPopulation)prg.Population;
            IMLData inputData = new BasicMLData(pop.Context.DefinedVariables.Count);

            prg.Compute(inputData);
            return(0);
        }
Example 39
        static void Main(string[] args)
        {
            // used for prediction of time series
            // sin(x) in theory
            int DEGREES     = 360;
            int WINDOW_SIZE = 16;

            double[][] Input = new double[DEGREES][];
            double[][] Ideal = new double[DEGREES][];

            // Create array of sin signals
            for (int i = 0; i < DEGREES; i++)
            {
                Input[i] = new double[WINDOW_SIZE];
                Ideal[i] = new double[] { Math.Sin(DegreeToRad(i + WINDOW_SIZE)) };
                for (int j = 0; j < WINDOW_SIZE; j++)
                {
                    Input[i][j] = Math.Sin(DegreeToRad(i + j));
                }
            }
            // construct training set
            IMLDataSet trainingSet = new BasicMLDataSet(Input, Ideal);

            // construct an Elman type network
            // simple recurrent network
            ElmanPattern pattern = new ElmanPattern
            {
                InputNeurons       = WINDOW_SIZE,
                ActivationFunction = new ActivationSigmoid(),
                OutputNeurons      = 1
            };

            pattern.AddHiddenLayer(WINDOW_SIZE);
            IMLMethod    method  = pattern.Generate();
            BasicNetwork network = (BasicNetwork)method;
            // Train network
            IMLTrain train = new Backpropagation(network, trainingSet);
            var      stop  = new StopTrainingStrategy();

            train.AddStrategy(new Greedy());
            train.AddStrategy(stop);
            int epoch = 0;

            while (!stop.ShouldStop())
            {
                train.Iteration();
                Console.WriteLine($"Training Epoch #{epoch} Error:{train.Error}");
                epoch++;
            }
            // Test network
            foreach (IMLDataPair pair in trainingSet)
            {
                IMLData output = network.Compute(pair.Input);
                Console.WriteLine($"actual={output[0]}, ideal={pair.Ideal[0]}");
            }
        }
Example 40
        /// <inheritdoc/>
        public double CalculateScore(IMLMethod method)
        {
            double result = 0;

            foreach (FitnessObjective obj in this.objectives)
            {
                result += obj.Score.CalculateScore(method) * obj.Weight;
            }

            return(result);
        }
Example 41
 /// <summary>
 /// Use an array to populate the memory of the neural network.
 /// </summary>
 ///
 /// <param name="array">An array of doubles.</param>
 /// <param name="network">The network to encode.</param>
 public static void ArrayToNetwork(double[] array,
     IMLMethod network)
 {
     if (network is IMLEncodable)
     {
         ((IMLEncodable) network).DecodeFromArray(array);
         return;
     }
     throw new NeuralNetworkError(Error
                                  + network.GetType().FullName);
 }
Example 42
        public double CalculateScore(IMLMethod network)
        {
            double ret = 0;

            for (int i = 0; i < 10; i++)
            {
                Pond p = new Pond((BasicNetwork)network, null);
                ret += p.tick(100);
            }
            return(ret / 10);
        }
Example 43
        public double CalculateScore(IMLMethod network)
        {
            int cnt = Interlocked.Increment(ref sessionCnt);

            EncogLogisticSimulator sim = new EncogLogisticSimulator((BasicNetwork)network, false);
            var score = sim.CalculateScore(CustomerOrders, false);

            Console.WriteLine($"Session #{cnt} \t Score: {Math.Abs(score).ToString("$#,##0")}");

            return(score);
        }
Example 44
        /// <summary>
        /// Create a Nelder Mead trainer.
        /// </summary>
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                               IMLDataSet training, String argsStr)
        {
            IDictionary <String, String> args = ArchitectureParse.ParseParams(argsStr);
            var holder = new ParamsHolder(args);

            //final double learningRate = holder.getDouble(
            //		MLTrainFactory.PROPERTY_LEARNING_RATE, false, 0.1);

            return(new NelderMeadTraining((BasicNetwork)method, training));
        }
Example 45
        public void Execute(IExampleInterface app)
        {
            IMLDataSet trainingData = GenerateTraining(InputOutputCount, Compl);
            IMLMethod  method       = EncogUtility.SimpleFeedForward(InputOutputCount,
                                                                     HiddenCount, 0, InputOutputCount, false);
            var train = new LevenbergMarquardtTraining((BasicNetwork)method, trainingData);

            EncogUtility.TrainToError(train, 0.01);

            EncogFramework.Instance.Shutdown();
        }
Example 46
 /// <summary>
 /// Use an array to populate the memory of the neural network.
 /// </summary>
 ///
 /// <param name="array">An array of doubles.</param>
 /// <param name="network">The network to encode.</param>
 public static void ArrayToNetwork(double[] array,
                                   IMLMethod network)
 {
     if (network is IMLEncodable)
     {
         ((IMLEncodable)network).DecodeFromArray(array);
         return;
     }
     throw new NeuralNetworkError(Error
                                  + network.GetType().FullName);
 }
Example 47
        /// <summary>
        /// Create a quick propagation trainer.
        /// </summary>
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
            IMLDataSet training, String argsStr)
        {
            IDictionary<String, String> args = ArchitectureParse.ParseParams(argsStr);
            var holder = new ParamsHolder(args);

            double learningRate = holder.GetDouble(
                MLTrainFactory.PropertyLearningRate, false, 2.0);

            return new QuickPropagation((BasicNetwork) method, training, learningRate);
        }
Example 48
        /// <summary>
        /// Create a Nelder Mead trainer.
        /// </summary>
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                IMLDataSet training, String argsStr)
        {
            IDictionary<String, String> args = ArchitectureParse.ParseParams(argsStr);
            var holder = new ParamsHolder(args);

            //final double learningRate = holder.getDouble(
            //		MLTrainFactory.PROPERTY_LEARNING_RATE, false, 0.1);

            return new NelderMeadTraining((BasicNetwork)method, training);
        }
Example 49
        /// <inheritdoc/>
        public double CalculateScore(IMLMethod method)
        {
            double result = 0;

            foreach (FitnessObjective obj in this.objectives)
            {
                result += obj.Score.CalculateScore(method) * obj.Weight;
            }

            return result;
        }
Example 50
 /// <summary>
 ///     Is the specified method supported for code generation?
 /// </summary>
 /// <param name="method">The specified method.</param>
 /// <returns>True, if the specified method is supported.</returns>
 public static bool IsSupported(IMLMethod method)
 {
     if (method is BasicNetwork)
     {
         return(true);
     }
     else
     {
         return(false);
     }
 }
Example 51
        /// <summary>
        /// Create a quick propagation trainer.
        /// </summary>
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                               IMLDataSet training, String argsStr)
        {
            IDictionary <String, String> args = ArchitectureParse.ParseParams(argsStr);
            var holder = new ParamsHolder(args);

            double learningRate = holder.GetDouble(
                MLTrainFactory.PropertyLearningRate, false, 2.0);

            return(new QuickPropagation((BasicNetwork)method, training, learningRate));
        }
Example 52
 /// <summary>
 ///     Construct the simulated annealing trainer.
 /// </summary>
 /// <param name="theAlgorithm">The algorithm to optimize.</param>
 /// <param name="theScore">The score function.</param>
 /// <param name="theKMax">The max number of iterations.</param>
 /// <param name="theStartingTemperature">The starting temperature.</param>
 /// <param name="theEndingTemperature">The ending temperature.</param>
 public TrainAnneal(IMLMethod theAlgorithm, IScoreFunction theScore, int theKMax,
                    double theStartingTemperature, double theEndingTemperature)
 {
     _algorithm           = theAlgorithm;
     _score               = theScore;
     _kMax                = theKMax;
     _currentError        = _score.CalculateScore(_algorithm);
     _startingTemperature = theStartingTemperature;
     _endingTemperature   = theEndingTemperature;
     _globalBest          = new double[theAlgorithm.LongTermMemory.Length];
     Array.Copy(_algorithm.LongTermMemory, 0, _globalBest, 0, _globalBest.Length);
 }
Example 53
        /// <summary>
        /// Create an RBF-SVD trainer.
        /// </summary>
        ///
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="args">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                               IMLDataSet training, String args)
        {
            if (!(method is RBFNetwork))
            {
                throw new EncogError(
                          "RBF-SVD training cannot be used on a method of type: "
                          + method.GetType().FullName);
            }

            return(new SVDTraining((RBFNetwork)method, training));
        }
Example 54
        public double CalculateScore(IMLMethod network)
        {
            var Player1 = new PlayerNetwork((BasicNetwork)network);
            var Score   = 0d;

            foreach (var Player2 in Adversaries)
            {
                var Game = PlayOneRound(Player1, Player2.Player, Player2.PlayerStart);
                Score += ScoreGame(Game, Player2);
            }
            return(Score);
        }
Example 55
        /// <inheritdoc/>
        public double CalculateScore(IMLMethod algo)
        {
            var ralgo = (IRegressionAlgorithm)algo;
            // evaluate
            _errorCalc.Clear();
            foreach (var pair in _trainingData)
            {
                double[] output = ralgo.ComputeRegression(pair.Input);
                _errorCalc.UpdateError(output, pair.Ideal, 1.0);
            }

            return _errorCalc.Calculate();
        }
Example 56
 public IMLTrain Create(IMLMethod method, IMLDataSet training, string argsStr)
 {
     if (!(method is IContainsFlat))
     {
         throw new EncogError("RPROP training cannot be used on a method of type: " + method.GetType().FullName);
     }

     // Read the initial update value and maximum step from the argument string.
     var holder = new ParamsHolder(ArchitectureParse.ParseParams(argsStr));
     double initialUpdate = holder.GetDouble("INIT_UPDATE", false, 0.1);
     double maxStep = holder.GetDouble("MAX_STEP", false, 50.0);
     return new ResilientPropagation((IContainsFlat) method, training, initialUpdate, maxStep);
 }
Example 57
        /// <summary>
        /// Randomize the specified BasicNetwork.
        /// </summary>
        /// <param name="method">The network to randomize.</param>
        public void Randomize(IMLMethod method)
        {
            if (!(method is BasicNetwork))
            {
                throw new EncogError("Nguyen-Widrow only supports BasicNetwork.");
            }

            BasicNetwork network = (BasicNetwork)method;

            for (int fromLayer = 0; fromLayer < network.LayerCount - 1; fromLayer++)
            {
                RandomizeSynapse(network, fromLayer);
            }
        }
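Typical use of this randomizer, assuming an already constructed BasicNetwork `network`; the weights are overwritten in place before training starts:

        // Apply Nguyen-Widrow initialization ahead of training.
        IRandomizer randomizer = new NguyenWidrowRandomizer();
        randomizer.Randomize(network);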
Example 58
        /// <inheritdoc/>
        public double CalculateScore(IMLMethod algo)
        {
            IErrorCalculation ec = ErrorCalc.Create();

            IRegressionAlgorithm ralgo = (IRegressionAlgorithm)algo;
            // evaluate
            ec.Clear();
            foreach (BasicData pair in _trainingData)
            {
                double[] output = ralgo.ComputeRegression(pair.Input);
                ec.UpdateError(output, pair.Ideal, 1.0);
            }

            return ec.Calculate();
        }
Example 59
        /// <summary>
        /// Create a PSO trainer.
        /// </summary>
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                IMLDataSet training, String argsStr)
        {
            IDictionary<String, String> args = ArchitectureParse.ParseParams(argsStr);
            ParamsHolder holder = new ParamsHolder(args);

            int particles = holder.GetInt(
                    MLTrainFactory.PropertyParticles, false, 20);

            ICalculateScore score = new TrainingSetScore(training);
            IRandomizer randomizer = new NguyenWidrowRandomizer();

            IMLTrain train = new NeuralPSO((BasicNetwork)method, randomizer, score, particles);

            return train;
        }
Example 60
        /// <summary>
        /// Create a K2 trainer.
        /// </summary>
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                               IMLDataSet training, String argsStr)
        {
            IDictionary<String, String> args = ArchitectureParse.ParseParams(argsStr);
            var holder = new ParamsHolder(args);

            int maxParents = holder.GetInt(
                MLTrainFactory.PropertyMaxParents, false, 1);
            String searchStr = holder.GetString("SEARCH", false, "k2");
            String estimatorStr = holder.GetString("ESTIMATOR", false, "simple");
            String initStr = holder.GetString("INIT", false, "naive");

            IBayesSearch search;
            IBayesEstimator estimator;
            BayesianInit init;

            if (string.Compare(searchStr, "k2", true) == 0)
            {
                search = new SearchK2();
            }
            else if (string.Compare(searchStr, "none", true) == 0)
            {
                search = new SearchNone();
            }
            else
            {
                throw new BayesianError("Invalid search type: " + searchStr);
            }

            if (string.Compare(estimatorStr, "simple", true) == 0)
            {
                estimator = new SimpleEstimator();
            }
            else if (string.Compare(estimatorStr, "none", true) == 0)
            {
                estimator = new EstimatorNone();
            }
            else
            {
                throw new BayesianError("Invalid estimator type: " + estimatorStr);
            }

            if (string.Compare(initStr, "simple") == 0)
            {
                init = BayesianInit.InitEmpty;
            }
            else if (string.Compare(initStr, "naive") == 0)
            {
                init = BayesianInit.InitNaiveBayes;
            }
            else if (string.Compare(initStr, "none") == 0)
            {
                init = BayesianInit.InitNoChange;
            }
            else
            {
                throw new BayesianError("Invalid init type: " + initStr);
            }

            return new TrainBayesian((BayesianNetwork) method, training, maxParents, init, search, estimator);
        }
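The `SEARCH`, `ESTIMATOR`, and `INIT` keys are read literally by the parser above; the key behind `MLTrainFactory.PropertyMaxParents` and the `TypeBayesian` constant name are assumptions. A sketch of driving this Create method through the factory, assuming a BayesianNetwork `bayesNetwork`, data set `trainingSet`, and factory `factory` already exist:

        // Hypothetical: drive the K2 trainer through the factory argument string.
        IMLTrain bayes = factory.Create(bayesNetwork, trainingSet,
            MLTrainFactory.TypeBayesian, "SEARCH=k2,ESTIMATOR=simple,INIT=naive");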