GetInt() public method

Get a parameter as an integer.
public GetInt ( String name, bool required, int defaultValue ) : int
name String The name of the parameter to retrieve.
required bool True if this value is required.
defaultValue int The default value, returned when the parameter is absent and not required.
return int The parameter value as an integer.
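
Before the project examples below, a minimal usage sketch (with the relevant Encog and System.Collections.Generic namespaces assumed to be imported). The parameter names and values are illustrative placeholders, and the sketch assumes the usual ParamsHolder behaviour of falling back to defaultValue only for optional parameters.

        // Illustrative sketch only; parameter names and values are placeholders.
        IDictionary<String, String> args = new Dictionary<String, String>();
        args["cycles"] = "50";

        var holder = new ParamsHolder(args);

        // "cycles" is present, so its parsed value (50) is returned.
        int cycles = holder.GetInt("cycles", false, 100);

        // "population" is absent and not required, so the default (1000) is returned.
        int populationSize = holder.GetInt("population", false, 1000);

        // A parameter that is marked required but is missing is expected to raise
        // an error instead of returning the default:
        // int maxParents = holder.GetInt("maxParents", true, 1);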
Code Example #1
        /// <summary>
        /// Create a NEAT population.
        /// </summary>
        /// <param name="architecture">The architecture string to use.</param>
        /// <param name="input">The input count.</param>
        /// <param name="output">The output count.</param>
        /// <returns>The population.</returns>
        public IMLMethod Create(String architecture, int input,
                int output)
        {
            if (input <= 0)
            {
                throw new EncogError("Must have at least one input for NEAT.");
            }

            if (output <= 0)
            {
                throw new EncogError("Must have at least one output for NEAT.");
            }

            IDictionary<String, String> args = ArchitectureParse.ParseParams(architecture);
            ParamsHolder holder = new ParamsHolder(args);

            int populationSize = holder.GetInt(
                    MLMethodFactory.PropertyPopulationSize, false, 1000);

            int cycles = holder.GetInt(
                    MLMethodFactory.PropertyCycles, false, NEATPopulation.DefaultCycles);

            IActivationFunction af = this.factory.Create(
                    holder.GetString(MLMethodFactory.PropertyAF, false, MLActivationFactory.AF_SSIGMOID));

            NEATPopulation pop = new NEATPopulation(input, output, populationSize);
            pop.Reset();
            pop.ActivationCycles = cycles;
            pop.NEATActivationFunction = af;

            return pop;
        }
Code Example #2
        /// <summary>
        /// Create an annealing trainer.
        /// </summary>
        ///
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                              IMLDataSet training, String argsStr)
        {
            if (!(method is BasicNetwork))
            {
                throw new TrainingError(
                    "Invalid method type, requires BasicNetwork");
            }

            ICalculateScore score = new TrainingSetScore(training);

            IDictionary<String, String> args = ArchitectureParse.ParseParams(argsStr);
            var holder = new ParamsHolder(args);
            double startTemp = holder.GetDouble(
                MLTrainFactory.PropertyTemperatureStart, false, 10);
            double stopTemp = holder.GetDouble(
                MLTrainFactory.PropertyTemperatureStop, false, 2);

            int cycles = holder.GetInt(MLTrainFactory.Cycles, false, 100);

            IMLTrain train = new NeuralSimulatedAnnealing(
                (BasicNetwork) method, score, startTemp, stopTemp, cycles);

            return train;
        }
Code Example #3
        /// <summary>
        /// Create a genetic algorithm trainer.
        /// </summary>
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                              IMLDataSet training, String argsStr)
        {
            if (!(method is BasicNetwork))
            {
                throw new TrainingError(
                    "Invalid method type, requires BasicNetwork");
            }

            ICalculateScore score = new TrainingSetScore(training);

            IDictionary<String, String> args = ArchitectureParse.ParseParams(argsStr);
            var holder = new ParamsHolder(args);
            int populationSize = holder.GetInt(
                MLTrainFactory.PropertyPopulationSize, false, 5000);

            IMLTrain train = new MLMethodGeneticAlgorithm(() =>
            {
                IMLMethod result = (IMLMethod) ObjectCloner.DeepCopy(method);
                ((IMLResettable) result).Reset();
                return result;
            }, score, populationSize);

            return train;
        }
Code Example #4
File: GeneticFactory.cs Project: neismit/emds
        /// <summary>
        /// Create a genetic algorithm trainer.
        /// </summary>
        ///
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                              IMLDataSet training, String argsStr)
        {
            if (!(method is BasicNetwork))
            {
                throw new TrainingError(
                    "Invalid method type, requires BasicNetwork");
            }

            ICalculateScore score = new TrainingSetScore(training);

            IDictionary<String, String> args = ArchitectureParse.ParseParams(argsStr);
            var holder = new ParamsHolder(args);
            int populationSize = holder.GetInt(
                MLTrainFactory.PropertyPopulationSize, false, 5000);
            double mutation = holder.GetDouble(
                MLTrainFactory.PropertyMutation, false, 0.1d);
            double mate = holder.GetDouble(MLTrainFactory.PropertyMate,
                                           false, 0.25d);

            IMLTrain train = new NeuralGeneticAlgorithm((BasicNetwork) method,
                                                       new RangeRandomizer(-1, 1), score, populationSize, mutation,
                                                       mate);

            return train;
        }
Code Example #5
        /// <summary>
        /// Create a PSO trainer.
        /// </summary>
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                IMLDataSet training, String argsStr)
        {
            IDictionary<String, String> args = ArchitectureParse.ParseParams(argsStr);
            ParamsHolder holder = new ParamsHolder(args);

            int particles = holder.GetInt(
                    MLTrainFactory.PropertyParticles, false, 20);

            ICalculateScore score = new TrainingSetScore(training);
            IRandomizer randomizer = new NguyenWidrowRandomizer();

            IMLTrain train = new NeuralPSO((BasicNetwork)method, randomizer, score, particles);

            return train;
        }
Code Example #6
        /// <summary>
        /// Create a K2 trainer.
        /// </summary>
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                              IMLDataSet training, String argsStr)
        {
            IDictionary<String, String> args = ArchitectureParse.ParseParams(argsStr);
            ParamsHolder holder = new ParamsHolder(args);

            int maxParents = holder.GetInt(
                MLTrainFactory.PropertyMaxParents, false, 1);
            String searchStr = holder.GetString("SEARCH", false, "k2");
            String estimatorStr = holder.GetString("ESTIMATOR", false, "simple");
            String initStr = holder.GetString("INIT", false, "naive");

            IBayesSearch search;
            IBayesEstimator estimator;
            BayesianInit init;

            if (string.Compare(searchStr, "k2", true) == 0) {
                search = new SearchK2();
            } else if (string.Compare(searchStr, "none", true) == 0) {
                search = new SearchNone();
            } else {
                throw new BayesianError("Invalid search type: " + searchStr);
            }

            if (string.Compare(estimatorStr, "simple", true) == 0) {
                estimator = new SimpleEstimator();
            } else if (string.Compare(estimatorStr, "none", true) == 0) {
                estimator = new EstimatorNone();
            } else {
                throw new BayesianError("Invalid estimator type: " + estimatorStr);
            }

            if (string.Compare(initStr, "simple", true) == 0) {
                init = BayesianInit.InitEmpty;
            } else if (string.Compare(initStr, "naive", true) == 0) {
                init = BayesianInit.InitNaiveBayes;
            } else if (string.Compare(initStr, "none", true) == 0) {
                init = BayesianInit.InitNoChange;
            } else {
                throw new BayesianError("Invalid init type: " + initStr);
            }

            return new TrainBayesian((BayesianNetwork) method, training, maxParents, init, search, estimator);
        }
Code Example #7
File: AnnealFactory.cs Project: neismit/emds
 public IMLTrain Create(IMLMethod method, IMLDataSet training, string argsStr)
 {
     if (!(method is BasicNetwork))
     {
         throw new TrainingError("Invalid method type, requires BasicNetwork");
     }

     ICalculateScore calculateScore = new TrainingSetScore(training);
     ParamsHolder holder = new ParamsHolder(ArchitectureParse.ParseParams(argsStr));

     // Optional parameters with defaults: start/stop temperature and cycle count.
     double startTemp = holder.GetDouble("startTemp", false, 10.0);
     double stopTemp = holder.GetDouble("stopTemp", false, 2.0);
     int cycles = holder.GetInt("cycles", false, 100);

     return new NeuralSimulatedAnnealing((BasicNetwork) method, calculateScore, startTemp, stopTemp, cycles);
 }
Code Example #8
        /// <summary>
        /// Create an EPL (Encog Programming Language) population.
        /// </summary>
        /// <param name="architecture">The architecture string to use.</param>
        /// <param name="input">The input count.</param>
        /// <param name="output">The output count.</param>
        /// <returns>The EPL population.</returns>
        public IMLMethod Create(String architecture, int input,
                int output)
        {

            if (input <= 0)
            {
                throw new EncogError("Must have at least one input for EPL.");
            }

            if (output <= 0)
            {
                throw new EncogError("Must have at least one output for EPL.");
            }


            IDictionary<String, String> args = ArchitectureParse.ParseParams(architecture);
            var holder = new ParamsHolder(args);

            int populationSize = holder.GetInt(
                    MLMethodFactory.PropertyPopulationSize, false, 1000);
            String variables = holder.GetString("vars", false, "x");
            String funct = holder.GetString("funct", false, null);

            var context = new EncogProgramContext();
            string[] tok = variables.Split(',');
            foreach (string v in tok)
            {
                context.DefineVariable(v);
            }

            if (String.Compare("numeric", funct, StringComparison.OrdinalIgnoreCase) == 0)
            {
                StandardExtensions.CreateNumericOperators(context);
            }

            var pop = new PrgPopulation(context, populationSize);

            if (context.Functions.Count > 0)
            {
                (new RampedHalfAndHalf(context, 2, 6)).Generate(new EncogRandom(), pop);
            }
            return pop;
        }
Code Example #9
File: GeneticFactory.cs Project: neismit/emds
 public IMLTrain Create(IMLMethod method, IMLDataSet training, string argsStr)
 {
     if (!(method is BasicNetwork))
     {
         throw new TrainingError("Invalid method type, requires BasicNetwork");
     }

     ICalculateScore calculateScore = new TrainingSetScore(training);
     ParamsHolder holder = new ParamsHolder(ArchitectureParse.ParseParams(argsStr));

     // Optional parameters with defaults: population size, mutation rate and mate rate.
     int populationSize = holder.GetInt("population", false, 5000);
     double mutationPercent = holder.GetDouble("mutate", false, 0.1);
     double matePercent = holder.GetDouble("mate", false, 0.25);

     return new NeuralGeneticAlgorithm((BasicNetwork) method, new RangeRandomizer(-1.0, 1.0),
         calculateScore, populationSize, mutationPercent, matePercent);
 }
Code Example #10
        /// <summary>
        /// Create a SOM neighborhood trainer.
        /// </summary>
        ///
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                              IMLDataSet training, String argsStr)
        {
            if (!(method is SOMNetwork))
            {
                throw new EncogError(
                    "Neighborhood training cannot be used on a method of type: "
                    + method.GetType().FullName);
            }

            IDictionary<String, String> args = ArchitectureParse.ParseParams(argsStr);
            var holder = new ParamsHolder(args);

            double learningRate = holder.GetDouble(
                MLTrainFactory.PropertyLearningRate, false, 0.7d);
            String neighborhoodStr = holder.GetString(
                MLTrainFactory.PropertyNeighborhood, false, "rbf");
            String rbfTypeStr = holder.GetString(
                MLTrainFactory.PropertyRBFType, false, "gaussian");

            RBFEnum t;

            if (rbfTypeStr.Equals("Gaussian", StringComparison.InvariantCultureIgnoreCase))
            {
                t = RBFEnum.Gaussian;
            }
            else if (rbfTypeStr.Equals("Multiquadric", StringComparison.InvariantCultureIgnoreCase))
            {
                t = RBFEnum.Multiquadric;
            }
            else if (rbfTypeStr.Equals("InverseMultiquadric", StringComparison.InvariantCultureIgnoreCase))
            {
                t = RBFEnum.InverseMultiquadric;
            }
            else if (rbfTypeStr.Equals("MexicanHat", StringComparison.InvariantCultureIgnoreCase))
            {
                t = RBFEnum.MexicanHat;
            }
            else
            {
                t = RBFEnum.Gaussian;
            }

            INeighborhoodFunction nf = null;

            if (neighborhoodStr.Equals("bubble", StringComparison.InvariantCultureIgnoreCase))
            {
                nf = new NeighborhoodBubble(1);
            }
            else if (neighborhoodStr.Equals("rbf", StringComparison.InvariantCultureIgnoreCase))
            {
                String str = holder.GetString(
                    MLTrainFactory.PropertyDimensions, true, null);
                int[] size = NumberList.FromListInt(CSVFormat.EgFormat, str);
                nf = new NeighborhoodRBF(size, t);
            }
            else if (neighborhoodStr.Equals("rbf1d", StringComparison.InvariantCultureIgnoreCase))
            {
                nf = new NeighborhoodRBF1D(t);
            }
            if (neighborhoodStr.Equals("single", StringComparison.InvariantCultureIgnoreCase))
            {
                nf = new NeighborhoodSingle();
            }

            var result = new BasicTrainSOM((SOMNetwork) method,
                                           learningRate, training, nf);

            if (args.ContainsKey(MLTrainFactory.PropertyIterations))
            {
                int plannedIterations = holder.GetInt(
                    MLTrainFactory.PropertyIterations, false, 1000);
                double startRate = holder.GetDouble(
                    MLTrainFactory.PropertyStartLearningRate, false, 0.05d);
                double endRate = holder.GetDouble(
                    MLTrainFactory.PropertyEndLearningRate, false, 0.05d);
                double startRadius = holder.GetDouble(
                    MLTrainFactory.PropertyStartRadius, false, 10);
                double endRadius = holder.GetDouble(
                    MLTrainFactory.PropertyEndRadius, false, 1);
                result.SetAutoDecay(plannedIterations, startRate, endRate,
                                    startRadius, endRadius);
            }

            return result;
        }
Code Example #11
        /// <summary>
        /// Create a RBF network.
        /// </summary>
        ///
        /// <param name="architecture">THe architecture string to use.</param>
        /// <param name="input">The input count.</param>
        /// <param name="output">The output count.</param>
        /// <returns>The RBF network.</returns>
        public IMLMethod Create(String architecture, int input,
            int output)
        {
            IList<String> layers = ArchitectureParse.ParseLayers(architecture);
            if (layers.Count != MaxLayers)
            {
                throw new EncogError(
                    "RBF Networks must have exactly three elements, "
                    + "separated by ->.");
            }

            ArchitectureLayer inputLayer = ArchitectureParse.ParseLayer(
                layers[0], input);
            ArchitectureLayer rbfLayer = ArchitectureParse.ParseLayer(
                layers[1], -1);
            ArchitectureLayer outputLayer = ArchitectureParse.ParseLayer(
                layers[2], output);

            int inputCount = inputLayer.Count;
            int outputCount = outputLayer.Count;

            RBFEnum t;

            if (rbfLayer.Name.Equals("Gaussian", StringComparison.InvariantCultureIgnoreCase))
            {
                t = RBFEnum.Gaussian;
            }
            else if (rbfLayer.Name.Equals("Multiquadric", StringComparison.InvariantCultureIgnoreCase))
            {
                t = RBFEnum.Multiquadric;
            }
            else if (rbfLayer.Name.Equals("InverseMultiquadric", StringComparison.InvariantCultureIgnoreCase))
            {
                t = RBFEnum.InverseMultiquadric;
            }
            else if (rbfLayer.Name.Equals("MexicanHat", StringComparison.InvariantCultureIgnoreCase))
            {
                t = RBFEnum.MexicanHat;
            }
            else
            {
                throw new NeuralNetworkError("Unknown RBF: " + rbfLayer.Name);
            }

            var holder = new ParamsHolder(rbfLayer.Params);

            int rbfCount = holder.GetInt("C", true, 0);

            var result = new RBFNetwork(inputCount, rbfCount,
                                        outputCount, t);

            return result;
        }
Code Example #12
        private void MenuFileOpen_Click(object sender, RoutedEventArgs e)
        {
            OpenFileDialog dlg = new OpenFileDialog();
            dlg.DefaultExt = ".eg"; // Default file extension
            dlg.Filter = "Encog EG Files (.EG)|*.eg"; // Filter files by extension

            Nullable<bool> result = dlg.ShowDialog();

            if (result == true)
            {
                FileInfo inf = new FileInfo(dlg.FileName);
                if (inf.Directory != null)
                {
                    var tempn = Encog.Util.NetworkUtil.NetworkUtility.LoadNetwork(
                        inf.Directory.ToString(), dlg.FileName);
                    Network = tempn;
                }

                if (Network == null)
                {
                    MessageBox.Show("This does not appear to be an EG file created for this example.");
                    return;
                }

                this.Util = new GatherUtil();
                ParamsHolder xpa = new ParamsHolder(Network.Properties);

                this.Util.EvalWindow = xpa.GetInt("eval", true, 1);
                this.Util.PredictWindow = xpa.GetInt("predict", true, 1);

            }
        }