Пример #1
0
 /// <summary>
 /// Create a SVM trainer.
 /// </summary>
 /// <param name="method">The method to use; must be a SupportVectorMachine.</param>
 /// <param name="training">The training data to use.</param>
 /// <param name="argsStr">The arguments to use.</param>
 /// <returns>The newly created trainer.</returns>
 public IMLTrain Create(IMLMethod method, IMLDataSet training, string argsStr)
 {
     if (!(method is SupportVectorMachine))
     {
         throw new EncogError("SVM Train training cannot be used on a method of type: " + method.GetType().FullName);
     }

     var svm = (SupportVectorMachine) method;

     // Defaults: gamma = 1 / input count, C = 1.0.
     double defaultGamma = 1.0 / svm.InputCount;
     double defaultC = 1.0;

     // The decompiled original wrapped this logic in an infinite loop with
     // always-false unsigned comparisons; the straight-line code below is
     // the only reachable path.
     IDictionary<string, string> theParams = ArchitectureParse.ParseParams(argsStr);
     ParamsHolder holder = new ParamsHolder(theParams);
     double gamma = holder.GetDouble("GAMMA", false, defaultGamma);
     double c = holder.GetDouble("C", false, defaultC);

     return new SVMTrain(svm, training)
     {
         Gamma = gamma,
         C = c
     };
 }
Пример #2
0
        /// <summary>
        /// Create a genetic-algorithm trainer for a neural network.
        /// </summary>
        ///
        /// <param name="method">The method to use; must be a BasicNetwork.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                              IMLDataSet training, String argsStr)
        {
            if (!(method is BasicNetwork))
            {
                throw new TrainingError(
                    "Invalid method type, requires BasicNetwork");
            }

            // Fitness of each candidate network is its score on the training set.
            ICalculateScore fitness = new TrainingSetScore(training);

            var paramHolder = new ParamsHolder(ArchitectureParse.ParseParams(argsStr));
            int populationSize = paramHolder.GetInt(
                MLTrainFactory.PropertyPopulationSize, false, 5000);
            double mutationRate = paramHolder.GetDouble(
                MLTrainFactory.PropertyMutation, false, 0.1d);
            double matingRate = paramHolder.GetDouble(
                MLTrainFactory.PropertyMate, false, 0.25d);

            // New population members start with weights in [-1, 1].
            return new NeuralGeneticAlgorithm((BasicNetwork) method,
                                              new RangeRandomizer(-1, 1), fitness,
                                              populationSize, mutationRate, matingRate);
        }
Пример #3
0
        /// <summary>
        /// Create a NEAT population.
        /// </summary>
        /// <param name="architecture">The architecture string to use.</param>
        /// <param name="input">The input count.</param>
        /// <param name="output">The output count.</param>
        /// <returns>The population.</returns>
        public IMLMethod Create(String architecture, int input,
                int output)
        {
            if (input <= 0)
            {
                throw new EncogError("Must have at least one input for NEAT.");
            }

            if (output <= 0)
            {
                throw new EncogError("Must have at least one output for NEAT.");
            }

            var paramHolder = new ParamsHolder(ArchitectureParse.ParseParams(architecture));

            int populationSize = paramHolder.GetInt(
                MLMethodFactory.PropertyPopulationSize, false, 1000);
            int activationCycles = paramHolder.GetInt(
                MLMethodFactory.PropertyCycles, false, NEATPopulation.DefaultCycles);
            IActivationFunction activation = this.factory.Create(
                paramHolder.GetString(MLMethodFactory.PropertyAF, false, MLActivationFactory.AF_SSIGMOID));

            // Build and randomize the initial population, then apply the settings.
            var population = new NEATPopulation(input, output, populationSize);
            population.Reset();
            population.ActivationCycles = activationCycles;
            population.NEATActivationFunction = activation;

            return population;
        }
        /// <summary>
        /// Create a SVM trainer.
        /// </summary>
        ///
        /// <param name="method">The method to use; must be a SupportVectorMachine.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                              IMLDataSet training, String argsStr)
        {
            if (!(method is SupportVectorMachine))
            {
                throw new EncogError(
                    "SVM Train training cannot be used on a method of type: "
                    + method.GetType().FullName);
            }

            var svm = (SupportVectorMachine) method;

            // Defaults: gamma = 1 divided by the input count, C = 1.0.
            double defaultGamma = 1.0d / svm.InputCount;
            double defaultC = 1.0d;

            var paramHolder = new ParamsHolder(ArchitectureParse.ParseParams(argsStr));
            double gammaValue = paramHolder.GetDouble(MLTrainFactory.PropertyGamma,
                                                      false, defaultGamma);
            double cValue = paramHolder.GetDouble(MLTrainFactory.PropertyC, false,
                                                  defaultC);

            return new SVMTrain(svm, training)
            {
                Gamma = gammaValue,
                C = cValue
            };
        }
        /// <summary>
        /// Create a genetic-algorithm trainer for a generic MLMethod.
        /// </summary>
        /// <param name="method">The method to use; must be a BasicNetwork.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                              IMLDataSet training, String argsStr)
        {
            if (!(method is BasicNetwork))
            {
                throw new TrainingError(
                    "Invalid method type, requires BasicNetwork");
            }

            // Fitness is measured against the supplied training set.
            ICalculateScore fitness = new TrainingSetScore(training);

            var paramHolder = new ParamsHolder(ArchitectureParse.ParseParams(argsStr));
            int populationSize = paramHolder.GetInt(
                MLTrainFactory.PropertyPopulationSize, false, 5000);

            // Each population member is a deep copy of the prototype method,
            // reset before use.
            return new MLMethodGeneticAlgorithm(() =>
                {
                    var candidate = (IMLMethod) ObjectCloner.DeepCopy(method);
                    ((IMLResettable) candidate).Reset();
                    return candidate;
                }, fitness, populationSize);
        }
Пример #6
0
        /// <summary>
        /// Create an annealing trainer.
        /// </summary>
        ///
        /// <param name="method">The method to use; must be a BasicNetwork.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                              IMLDataSet training, String argsStr)
        {
            if (!(method is BasicNetwork))
            {
                throw new TrainingError(
                    "Invalid method type, requires BasicNetwork");
            }

            ICalculateScore fitness = new TrainingSetScore(training);

            var paramHolder = new ParamsHolder(ArchitectureParse.ParseParams(argsStr));

            // Annealing schedule: temperature runs from start to stop over the cycles.
            double startTemperature = paramHolder.GetDouble(
                MLTrainFactory.PropertyTemperatureStart, false, 10);
            double stopTemperature = paramHolder.GetDouble(
                MLTrainFactory.PropertyTemperatureStop, false, 2);
            int cycleCount = paramHolder.GetInt(MLTrainFactory.Cycles, false, 100);

            return new NeuralSimulatedAnnealing(
                (BasicNetwork) method, fitness, startTemperature,
                stopTemperature, cycleCount);
        }
        /// <summary>
        /// Create a quick propagation trainer.
        /// </summary>
        /// <param name="method">The method to use; must be a BasicNetwork.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        /// <exception cref="EncogError">If the method is not a BasicNetwork.</exception>
        public IMLTrain Create(IMLMethod method,
            IMLDataSet training, String argsStr)
        {
            // Guard the cast below; the sibling factory methods in this file
            // validate the method type explicitly instead of letting the
            // cast throw InvalidCastException.
            if (!(method is BasicNetwork))
            {
                throw new EncogError(
                    "QPROP training cannot be used on a method of type: "
                    + method.GetType().FullName);
            }

            IDictionary<String, String> args = ArchitectureParse.ParseParams(argsStr);
            var holder = new ParamsHolder(args);

            // Learning rate defaults to 2.0, matching the original code.
            double learningRate = holder.GetDouble(
                MLTrainFactory.PropertyLearningRate, false, 2.0);

            return new QuickPropagation((BasicNetwork) method, training, learningRate);
        }
        /// <summary>
        /// Create a Nelder-Mead trainer.
        /// </summary>
        /// <param name="method">The method to use; must be a BasicNetwork.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        /// <exception cref="EncogError">If the method is not a BasicNetwork.</exception>
        public IMLTrain Create(IMLMethod method,
                IMLDataSet training, String argsStr)
        {
            // Guard the cast below, consistent with the other factory methods.
            if (!(method is BasicNetwork))
            {
                throw new EncogError(
                    "Nelder-Mead training cannot be used on a method of type: "
                    + method.GetType().FullName);
            }

            // Parse (and thereby validate) the argument string; Nelder-Mead
            // currently takes no tunable parameters, so the result is unused.
            ArchitectureParse.ParseParams(argsStr);

            return new NelderMeadTraining((BasicNetwork) method, training);
        }
Пример #9
0
 /// <summary>
 /// Create a RPROP trainer.
 /// </summary>
 /// <param name="method">The method to use; must implement IContainsFlat.</param>
 /// <param name="training">The training data to use.</param>
 /// <param name="argsStr">The arguments to use.</param>
 /// <returns>The newly created trainer.</returns>
 public IMLTrain Create(IMLMethod method, IMLDataSet training, string argsStr)
 {
     if (!(method is IContainsFlat))
     {
         throw new EncogError("RPROP training cannot be used on a method of type: " + method.GetType().FullName);
     }

     // The decompiled original guarded the return with an unsigned
     // comparison that is always true; it is removed here.
     ParamsHolder holder = new ParamsHolder(ArchitectureParse.ParseParams(argsStr));
     double initialUpdate = holder.GetDouble("INIT_UPDATE", false, 0.1);
     double maxStep = holder.GetDouble("MAX_STEP", false, 50.0);
     return new ResilientPropagation((IContainsFlat) method, training, initialUpdate, maxStep);
 }
Пример #10
0
        /// <summary>
        /// Create a PSO trainer.
        /// </summary>
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                IMLDataSet training, String argsStr)
        {
            var paramHolder = new ParamsHolder(ArchitectureParse.ParseParams(argsStr));

            // Swarm size; defaults to 20 particles.
            int particleCount = paramHolder.GetInt(
                MLTrainFactory.PropertyParticles, false, 20);

            ICalculateScore fitness = new TrainingSetScore(training);
            IRandomizer weightRandomizer = new NguyenWidrowRandomizer();

            return new NeuralPSO((BasicNetwork) method, weightRandomizer,
                                 fitness, particleCount);
        }
        /// <summary>
        /// Create a K2 Bayesian trainer.
        /// </summary>
        /// <param name="method">The method to use; must be a BayesianNetwork.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
            IMLDataSet training, String argsStr)
        {
            var paramHolder = new ParamsHolder(ArchitectureParse.ParseParams(argsStr));

            int maxParents = paramHolder.GetInt(
                MLTrainFactory.PropertyMaxParents, false, 1);
            String searchStr = paramHolder.GetString("SEARCH", false, "k2");
            String estimatorStr = paramHolder.GetString("ESTIMATOR", false, "simple");
            String initStr = paramHolder.GetString("INIT", false, "naive");

            // Structure-search strategy (case-insensitive comparison).
            IBayesSearch search;
            if (string.Compare(searchStr, "k2", true) == 0)
            {
                search = new SearchK2();
            }
            else if (string.Compare(searchStr, "none", true) == 0)
            {
                search = new SearchNone();
            }
            else
            {
                throw new BayesianError("Invalid search type: " + searchStr);
            }

            // Probability estimator (case-insensitive comparison).
            IBayesEstimator estimator;
            if (string.Compare(estimatorStr, "simple", true) == 0)
            {
                estimator = new SimpleEstimator();
            }
            else if (string.Compare(estimatorStr, "none", true) == 0)
            {
                estimator = new EstimatorNone();
            }
            else
            {
                throw new BayesianError("Invalid estimator type: " + estimatorStr);
            }

            // Initial network structure.
            // NOTE(review): unlike SEARCH/ESTIMATOR, this comparison was
            // case-sensitive in the original code; preserved as-is.
            BayesianInit init;
            if (string.Compare(initStr, "simple") == 0)
            {
                init = BayesianInit.InitEmpty;
            }
            else if (string.Compare(initStr, "naive") == 0)
            {
                init = BayesianInit.InitNaiveBayes;
            }
            else if (string.Compare(initStr, "none") == 0)
            {
                init = BayesianInit.InitNoChange;
            }
            else
            {
                throw new BayesianError("Invalid init type: " + initStr);
            }

            return new TrainBayesian((BayesianNetwork) method, training,
                                     maxParents, init, search, estimator);
        }
Пример #12
0
        /// <summary>
        /// Create a LMA (Levenberg-Marquardt) trainer.
        /// </summary>
        ///
        /// <param name="method">The method to use; must be a BasicNetwork.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
            IMLDataSet training, String argsStr)
        {
            if (!(method is BasicNetwork))
            {
                throw new EncogError(
                    "LMA training cannot be used on a method of type: "
                    + method.GetType().FullName);
            }

            // Parse (and thereby validate) the argument string; this LMA
            // variant takes no tunable parameters, so the result is unused.
            // (The original also built an unused ParamsHolder - removed.)
            ArchitectureParse.ParseParams(argsStr);

            return new LevenbergMarquardtTraining(
                (BasicNetwork) method, training);
        }
Пример #13
0
        /// <summary>
        /// Create a LMA (Levenberg-Marquardt) trainer.
        /// </summary>
        ///
        /// <param name="method">The method to use; must be a BasicNetwork.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                              IMLDataSet training, String argsStr)
        {
            if (!(method is BasicNetwork))
            {
                throw new EncogError(
                    "LMA training cannot be used on a method of type: "
                    + method.GetType().FullName);
            }

            var paramHolder = new ParamsHolder(ArchitectureParse.ParseParams(argsStr));

            // Optional Bayesian regularization; off unless requested.
            bool bayesianRegularization = paramHolder.GetBoolean(
                MLTrainFactory.PropertyBayesianRegularization, false, false);

            var trainer = new LevenbergMarquardtTraining(
                (BasicNetwork) method, training);
            trainer.UseBayesianRegularization = bayesianRegularization;
            return trainer;
        }
Пример #14
0
 /// <summary>
 /// Create an annealing trainer.
 /// </summary>
 /// <param name="method">The method to use; must be a BasicNetwork.</param>
 /// <param name="training">The training data to use.</param>
 /// <param name="argsStr">The arguments to use.</param>
 /// <returns>The newly created trainer.</returns>
 public IMLTrain Create(IMLMethod method, IMLDataSet training, string argsStr)
 {
     if (!(method is BasicNetwork))
     {
         throw new TrainingError("Invalid method type, requires BasicNetwork");
     }

     ICalculateScore calculateScore = new TrainingSetScore(training);
     ParamsHolder holder = new ParamsHolder(ArchitectureParse.ParseParams(argsStr));

     // The decompiled original contained an unreachable branch that read
     // uninitialized locals; only this straight-line path is real.
     double startTemp = holder.GetDouble("startTemp", false, 10.0);
     double stopTemp = holder.GetDouble("stopTemp", false, 2.0);
     int cycles = holder.GetInt("cycles", false, 100);
     return new NeuralSimulatedAnnealing((BasicNetwork) method, calculateScore, startTemp, stopTemp, cycles);
 }
        /// <summary>
        /// Create an EPL program population.
        /// </summary>
        /// <param name="architecture">The architecture string to use.</param>
        /// <param name="input">The input count.</param>
        /// <param name="output">The output count.</param>
        /// <returns>The program population.</returns>
        public IMLMethod Create(String architecture, int input,
                int output)
        {
            if (input <= 0)
            {
                throw new EncogError("Must have at least one input for EPL.");
            }

            if (output <= 0)
            {
                throw new EncogError("Must have at least one output for EPL.");
            }

            var paramHolder = new ParamsHolder(ArchitectureParse.ParseParams(architecture));

            int populationSize = paramHolder.GetInt(
                MLMethodFactory.PropertyPopulationSize, false, 1000);
            String variables = paramHolder.GetString("vars", false, "x");
            String funct = paramHolder.GetString("funct", false, null);

            // Register each comma-separated variable name with the program context.
            var context = new EncogProgramContext();
            foreach (string variableName in variables.Split(','))
            {
                context.DefineVariable(variableName);
            }

            // "numeric" installs the standard numeric operator set.
            if (String.Compare("numeric", funct, StringComparison.OrdinalIgnoreCase) == 0)
            {
                StandardExtensions.CreateNumericOperators(context);
            }

            var population = new PrgPopulation(context, populationSize);

            // Only generate initial programs when operators were registered.
            if (context.Functions.Count > 0)
            {
                new RampedHalfAndHalf(context, 2, 6).Generate(new EncogRandom(), population);
            }
            return population;
        }
Пример #16
0
 /// <summary>
 /// Create a genetic-algorithm trainer.
 /// </summary>
 /// <param name="method">The method to use; must be a BasicNetwork.</param>
 /// <param name="training">The training data to use.</param>
 /// <param name="argsStr">The arguments to use.</param>
 /// <returns>The newly created trainer.</returns>
 public IMLTrain Create(IMLMethod method, IMLDataSet training, string argsStr)
 {
     if (!(method is BasicNetwork))
     {
         throw new TrainingError("Invalid method type, requires BasicNetwork");
     }

     ICalculateScore calculateScore = new TrainingSetScore(training);

     // The decompiled original wrapped this in a do/while whose unsigned
     // condition is always false; the loop ran exactly once.
     ParamsHolder holder = new ParamsHolder(ArchitectureParse.ParseParams(argsStr));
     int populationSize = holder.GetInt("population", false, 5000);
     double mutationPercent = holder.GetDouble("mutate", false, 0.1);
     double matePercent = holder.GetDouble("mate", false, 0.25);
     return new NeuralGeneticAlgorithm((BasicNetwork) method, new RangeRandomizer(-1.0, 1.0), calculateScore, populationSize, mutationPercent, matePercent);
 }
Пример #17
0
        /// <summary>
        /// Create a RPROP trainer.
        /// </summary>
        ///
        /// <param name="method">The method to use; must implement IContainsFlat.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                              IMLDataSet training, String argsStr)
        {
            if (!(method is IContainsFlat))
            {
                throw new EncogError(
                    "RPROP training cannot be used on a method of type: "
                    + method.GetType().FullName);
            }

            var paramHolder = new ParamsHolder(ArchitectureParse.ParseParams(argsStr));

            // Initial update value and maximum step, falling back to the RPROP defaults.
            double initialUpdate = paramHolder.GetDouble(
                MLTrainFactory.PropertyInitialUpdate, false,
                RPROPConst.DefaultInitialUpdate);
            double maxStep = paramHolder.GetDouble(
                MLTrainFactory.PropertyMaxStep, false,
                RPROPConst.DefaultMaxStep);

            return new ResilientPropagation((IContainsFlat) method, training,
                                            initialUpdate, maxStep);
        }
Пример #18
0
 /// <summary>
 /// Create a LMA (Levenberg-Marquardt) trainer.
 /// </summary>
 /// <param name="method">The method to use; must be a BasicNetwork.</param>
 /// <param name="training">The training data to use.</param>
 /// <param name="argsStr">The arguments to use.</param>
 /// <returns>The newly created trainer.</returns>
 public IMLTrain Create(IMLMethod method, IMLDataSet training, string argsStr)
 {
     if (!(method is BasicNetwork))
     {
         throw new EncogError("LMA training cannot be used on a method of type: " + method.GetType().FullName);
     }

     // The decompiled original reached this through impossible branches
     // ("if (3 == 0)") and uninitialized locals; this is the straight-line
     // equivalent.
     bool useBayesianRegularization = new ParamsHolder(ArchitectureParse.ParseParams(argsStr)).GetBoolean("BAYES_REG", false, false);
     var trainer = new LevenbergMarquardtTraining((BasicNetwork) method, training);
     trainer.UseBayesianRegularization = useBayesianRegularization;
     return trainer;
 }
Пример #19
0
 /// <summary>
 /// Create a SOM neighborhood trainer.
 /// </summary>
 /// <param name="method">The method to use; must be a SOMNetwork.</param>
 /// <param name="training">The training data to use.</param>
 /// <param name="argsStr">The arguments to use.</param>
 /// <returns>The newly created trainer.</returns>
 public IMLTrain Create(IMLMethod method, IMLDataSet training, string argsStr)
 {
     // BUGFIX: the decompiled original tested "method is SupportVectorMachine"
     // yet cast to SOMNetwork below, so any accepted method would have thrown
     // InvalidCastException. The check now matches the required type.
     if (!(method is SOMNetwork))
     {
         throw new EncogError("Neighborhood training cannot be used on a method of type: " + method.GetType().FullName);
     }

     IDictionary<string, string> args = ArchitectureParse.ParseParams(argsStr);
     ParamsHolder holder = new ParamsHolder(args);

     double learningRate = holder.GetDouble("LR", false, 0.7);
     string neighborhoodStr = holder.GetString("NEIGHBORHOOD", false, "rbf");
     string rbfTypeStr = holder.GetString("RBF_TYPE", false, "gaussian");

     // Resolve the radial basis function type (defaults to Gaussian).
     RBFEnum rbfType;
     if (rbfTypeStr.Equals("Gaussian", StringComparison.InvariantCultureIgnoreCase))
     {
         rbfType = RBFEnum.Gaussian;
     }
     else if (rbfTypeStr.Equals("Multiquadric", StringComparison.InvariantCultureIgnoreCase))
     {
         rbfType = RBFEnum.Multiquadric;
     }
     else if (rbfTypeStr.Equals("InverseMultiquadric", StringComparison.InvariantCultureIgnoreCase))
     {
         rbfType = RBFEnum.InverseMultiquadric;
     }
     else if (rbfTypeStr.Equals("MexicanHat", StringComparison.InvariantCultureIgnoreCase))
     {
         rbfType = RBFEnum.MexicanHat;
     }
     else
     {
         rbfType = RBFEnum.Gaussian;
     }

     // Resolve the neighborhood function; unknown names leave it null,
     // exactly as in the original control flow.
     INeighborhoodFunction neighborhood = null;
     if (neighborhoodStr.Equals("bubble", StringComparison.InvariantCultureIgnoreCase))
     {
         neighborhood = new NeighborhoodBubble(1);
     }
     else if (neighborhoodStr.Equals("rbf", StringComparison.InvariantCultureIgnoreCase))
     {
         // DIM is required for the multi-dimensional RBF neighborhood.
         string dimStr = holder.GetString("DIM", true, null);
         int[] size = NumberList.FromListInt(CSVFormat.EgFormat, dimStr);
         neighborhood = new NeighborhoodRBF(size, rbfType);
     }
     else if (neighborhoodStr.Equals("rbf1d", StringComparison.InvariantCultureIgnoreCase))
     {
         neighborhood = new NeighborhoodRBF1D(rbfType);
     }
     else if (neighborhoodStr.Equals("single", StringComparison.InvariantCultureIgnoreCase))
     {
         neighborhood = new NeighborhoodSingle();
     }

     BasicTrainSOM result = new BasicTrainSOM((SOMNetwork) method, learningRate, training, neighborhood);

     // When an iteration budget is given, configure automatic decay of the
     // learning rate and neighborhood radius over that many iterations.
     if (args.ContainsKey("ITERATIONS"))
     {
         int iterations = holder.GetInt("ITERATIONS", false, 1000);
         double startRate = holder.GetDouble("START_LR", false, 0.05);
         double endRate = holder.GetDouble("END_LR", false, 0.05);
         double startRadius = holder.GetDouble("START_RADIUS", false, 10.0);
         double endRadius = holder.GetDouble("END_RADIUS", false, 1.0);
         result.SetAutoDecay(iterations, startRate, endRate, startRadius, endRadius);
     }

     return result;
 }
        /// <summary>
        /// Create a SOM neighborhood trainer.
        /// </summary>
        ///
        /// <param name="method">The method to use; must be a SOMNetwork.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                              IMLDataSet training, String argsStr)
        {
            // BUGFIX: the original tested "method is SupportVectorMachine",
            // but the cast below requires a SOMNetwork, so any method the
            // check accepted would have thrown InvalidCastException.
            if (!(method is SOMNetwork))
            {
                throw new EncogError(
                    "Neighborhood training cannot be used on a method of type: "
                    + method.GetType().FullName);
            }

            IDictionary<String, String> args = ArchitectureParse.ParseParams(argsStr);
            var holder = new ParamsHolder(args);

            double learningRate = holder.GetDouble(
                MLTrainFactory.PropertyLearningRate, false, 0.7d);
            String neighborhoodStr = holder.GetString(
                MLTrainFactory.PropertyNeighborhood, false, "rbf");
            String rbfTypeStr = holder.GetString(
                MLTrainFactory.PropertyRBFType, false, "gaussian");

            // Resolve the radial basis function type (defaults to Gaussian).
            RBFEnum t;

            if (rbfTypeStr.Equals("Gaussian", StringComparison.InvariantCultureIgnoreCase))
            {
                t = RBFEnum.Gaussian;
            }
            else if (rbfTypeStr.Equals("Multiquadric", StringComparison.InvariantCultureIgnoreCase))
            {
                t = RBFEnum.Multiquadric;
            }
            else if (rbfTypeStr.Equals("InverseMultiquadric", StringComparison.InvariantCultureIgnoreCase))
            {
                t = RBFEnum.InverseMultiquadric;
            }
            else if (rbfTypeStr.Equals("MexicanHat", StringComparison.InvariantCultureIgnoreCase))
            {
                t = RBFEnum.MexicanHat;
            }
            else
            {
                t = RBFEnum.Gaussian;
            }

            // Resolve the neighborhood function; unknown names leave it null.
            INeighborhoodFunction nf = null;

            if (neighborhoodStr.Equals("bubble", StringComparison.InvariantCultureIgnoreCase))
            {
                nf = new NeighborhoodBubble(1);
            }
            else if (neighborhoodStr.Equals("rbf", StringComparison.InvariantCultureIgnoreCase))
            {
                // The dimensions parameter is required for the RBF neighborhood.
                String str = holder.GetString(
                    MLTrainFactory.PropertyDimensions, true, null);
                int[] size = NumberList.FromListInt(CSVFormat.EgFormat, str);
                nf = new NeighborhoodRBF(size, t);
            }
            else if (neighborhoodStr.Equals("rbf1d", StringComparison.InvariantCultureIgnoreCase))
            {
                nf = new NeighborhoodRBF1D(t);
            }
            if (neighborhoodStr.Equals("single", StringComparison.InvariantCultureIgnoreCase))
            {
                nf = new NeighborhoodSingle();
            }

            var result = new BasicTrainSOM((SOMNetwork) method,
                                           learningRate, training, nf);

            // When an iteration budget is given, configure automatic decay of
            // the learning rate and neighborhood radius over those iterations.
            if (args.ContainsKey(MLTrainFactory.PropertyIterations))
            {
                int plannedIterations = holder.GetInt(
                    MLTrainFactory.PropertyIterations, false, 1000);
                double startRate = holder.GetDouble(
                    MLTrainFactory.PropertyStartLearningRate, false, 0.05d);
                double endRate = holder.GetDouble(
                    MLTrainFactory.PropertyEndLearningRate, false, 0.05d);
                double startRadius = holder.GetDouble(
                    MLTrainFactory.PropertyStartRadius, false, 10);
                double endRadius = holder.GetDouble(
                    MLTrainFactory.PropertyEndRadius, false, 1);
                result.SetAutoDecay(plannedIterations, startRate, endRate,
                                    startRadius, endRadius);
            }

            return result;
        }
        /// <summary>
        /// Create a RBF network.
        /// </summary>
        ///
        /// <param name="architecture">The architecture string to use.</param>
        /// <param name="input">The input count.</param>
        /// <param name="output">The output count.</param>
        /// <returns>The RBF network.</returns>
        public IMLMethod Create(String architecture, int input,
            int output)
        {
            IList<String> layerSpecs = ArchitectureParse.ParseLayers(architecture);
            if (layerSpecs.Count != MaxLayers)
            {
                throw new EncogError(
                    "RBF Networks must have exactly three elements, "
                    + "separated by ->.");
            }

            ArchitectureLayer inputSpec = ArchitectureParse.ParseLayer(
                layerSpecs[0], input);
            ArchitectureLayer rbfSpec = ArchitectureParse.ParseLayer(
                layerSpecs[1], -1);
            ArchitectureLayer outputSpec = ArchitectureParse.ParseLayer(
                layerSpecs[2], output);

            // Map the middle layer's name to an RBF type; unknown names are an error.
            RBFEnum rbfType;
            if (rbfSpec.Name.Equals("Gaussian", StringComparison.InvariantCultureIgnoreCase))
            {
                rbfType = RBFEnum.Gaussian;
            }
            else if (rbfSpec.Name.Equals("Multiquadric", StringComparison.InvariantCultureIgnoreCase))
            {
                rbfType = RBFEnum.Multiquadric;
            }
            else if (rbfSpec.Name.Equals("InverseMultiquadric", StringComparison.InvariantCultureIgnoreCase))
            {
                rbfType = RBFEnum.InverseMultiquadric;
            }
            else if (rbfSpec.Name.Equals("MexicanHat", StringComparison.InvariantCultureIgnoreCase))
            {
                rbfType = RBFEnum.MexicanHat;
            }
            else
            {
                throw new NeuralNetworkError("Unknown RBF: " + rbfSpec.Name);
            }

            // The RBF count ("C") is a required parameter of the middle layer.
            int rbfCount = new ParamsHolder(rbfSpec.Params).GetInt("C", true, 0);

            return new RBFNetwork(inputSpec.Count, rbfCount,
                                  outputSpec.Count, rbfType);
        }
Пример #22
0
        /// <summary>
        /// Create a PNN network from an architecture string.
        /// </summary>
        ///
        /// <param name="architecture">The architecture string to use.</param>
        /// <param name="input">The input count.</param>
        /// <param name="output">The output count.</param>
        /// <returns>The PNN network.</returns>
        public IMLMethod Create(String architecture, int input,
                               int output)
        {
            IList<String> layers = ArchitectureParse.ParseLayers(architecture);
            if (layers.Count != MaxLayers)
            {
                throw new EncogError(
                    "PNN Networks must have exactly three elements, "
                    + "separated by ->.");
            }

            // Parse the three layer specifications: input, PNN (middle), output.
            ArchitectureLayer parsedInput = ArchitectureParse.ParseLayer(
                layers[0], input);
            ArchitectureLayer parsedPnn = ArchitectureParse.ParseLayer(
                layers[1], -1);
            ArchitectureLayer parsedOutput = ArchitectureParse.ParseLayer(
                layers[2], output);

            // The middle layer's name selects the output mode:
            // c = classification, r = regression, u = unsupervised.
            String modeName = parsedPnn.Name;
            PNNOutputMode mode;
            if (modeName.Equals("c", StringComparison.InvariantCultureIgnoreCase))
            {
                mode = PNNOutputMode.Classification;
            }
            else if (modeName.Equals("r", StringComparison.InvariantCultureIgnoreCase))
            {
                mode = PNNOutputMode.Regression;
            }
            else if (modeName.Equals("u", StringComparison.InvariantCultureIgnoreCase))
            {
                mode = PNNOutputMode.Unsupervised;
            }
            else
            {
                throw new NeuralNetworkError("Unknown model: " + modeName);
            }

            // The optional KERNEL parameter selects the kernel; gaussian is the default.
            var pnnParams = new ParamsHolder(parsedPnn.Params);
            String kernelName = pnnParams.GetString("KERNEL", false, "gaussian");

            PNNKernelType kernelType;
            if (kernelName.Equals("gaussian", StringComparison.InvariantCultureIgnoreCase))
            {
                kernelType = PNNKernelType.Gaussian;
            }
            else if (kernelName.Equals("reciprocal", StringComparison.InvariantCultureIgnoreCase))
            {
                kernelType = PNNKernelType.Reciprocal;
            }
            else
            {
                throw new NeuralNetworkError("Unknown kernel: " + kernelName);
            }

            return new BasicPNN(kernelType, mode, parsedInput.Count,
                                parsedOutput.Count);
        }
Пример #23
0
 /// <summary>
 /// Create a Manhattan propagation trainer.
 /// </summary>
 /// <param name="method">The method to train; must be a BasicNetwork.</param>
 /// <param name="training">The training data to use.</param>
 /// <param name="argsStr">The argument string; supports LR (learning rate, default 0.1).</param>
 /// <returns>The newly created trainer.</returns>
 public IMLTrain Create(IMLMethod method, IMLDataSet training, string argsStr)
 {
     // Validate up front so the caller gets a descriptive error instead of
     // an opaque InvalidCastException (consistent with the other trainer
     // factories in this file).
     if (!(method is BasicNetwork))
     {
         throw new EncogError(
             "Manhattan training cannot be used on a method of type: "
             + method.GetType().FullName);
     }

     var holder = new ParamsHolder(ArchitectureParse.ParseParams(argsStr));
     double learnRate = holder.GetDouble("LR", false, 0.1);
     return new ManhattanPropagation((BasicNetwork) method, training, learnRate);
 }
        /// <summary>
        /// Handle the File | Open menu item: prompt for an Encog EG file, load
        /// the network it contains, and rebuild the gather utility from the
        /// "eval" and "predict" properties stored on the network.
        /// </summary>
        /// <param name="sender">The menu item that raised the event.</param>
        /// <param name="e">The routed event arguments.</param>
        private void MenuFileOpen_Click(object sender, RoutedEventArgs e)
        {
            OpenFileDialog dlg = new OpenFileDialog();
            dlg.DefaultExt = ".eg"; // Default file extension
            dlg.Filter = "Encog EG Files (.EG)|*.eg"; // Filter files by extension

            Nullable<bool> result = dlg.ShowDialog();

            if (result == true)
            {
                FileInfo inf = new FileInfo(dlg.FileName);
                if (inf.Directory != null)
                {
                    Network = Encog.Util.NetworkUtil.NetworkUtility.LoadNetwork(
                        inf.Directory.ToString(), dlg.FileName);
                }

                if (Network == null)
                {
                    MessageBox.Show("This does not appear to be an EG file created for this example.");
                    return;
                }

                // Both properties are required ("eval"/"predict" must exist in
                // Network.Properties); GetInt with required=true throws otherwise.
                this.Util = new GatherUtil();
                ParamsHolder xpa = new ParamsHolder(Network.Properties);
                this.Util.EvalWindow = xpa.GetInt("eval", true, 1);
                this.Util.PredictWindow = xpa.GetInt("predict", true, 1);
            }
        }
Пример #25
0
 /// <summary>
 /// Create a backpropagation trainer.
 /// </summary>
 /// <param name="method">The method to train; must be a BasicNetwork.</param>
 /// <param name="training">The training data to use.</param>
 /// <param name="argsStr">The argument string; supports LR (learning rate,
 /// default 0.7) and MOM (momentum, default 0.3).</param>
 /// <returns>The newly created trainer.</returns>
 public IMLTrain Create(IMLMethod method, IMLDataSet training, string argsStr)
 {
     // Validate up front so the caller gets a descriptive error instead of
     // an opaque InvalidCastException (consistent with the other trainer
     // factories in this file).
     if (!(method is BasicNetwork))
     {
         throw new EncogError(
             "Backpropagation training cannot be used on a method of type: "
             + method.GetType().FullName);
     }

     ParamsHolder holder = new ParamsHolder(ArchitectureParse.ParseParams(argsStr));
     double learnRate = holder.GetDouble("LR", false, 0.7);
     double momentum = holder.GetDouble("MOM", false, 0.3);
     return new Backpropagation((BasicNetwork) method, training, learnRate, momentum);
 }
Пример #26
0
 /// <summary>
 /// Create a PNN network from an architecture string of the form
 /// input->mode->output, where mode is c (classification), r (regression)
 /// or u (unsupervised), optionally carrying a KERNEL parameter of
 /// gaussian (the default) or reciprocal.
 /// </summary>
 /// <param name="architecture">The architecture string to use.</param>
 /// <param name="input">The input count.</param>
 /// <param name="output">The output count.</param>
 /// <returns>The PNN network.</returns>
 public IMLMethod Create(string architecture, int input, int output)
 {
     // NOTE(review): the previous body was decompiler output; its gotos and
     // always-true/false unsigned-arithmetic guards reduce to the
     // straight-line logic below (identical to the structured PNN factory).
     IList<string> list = ArchitectureParse.ParseLayers(architecture);
     if (list.Count != 3)
     {
         throw new EncogError("PNN Networks must have exactly three elements, separated by ->.");
     }

     ArchitectureLayer inputLayer = ArchitectureParse.ParseLayer(list[0], input);
     ArchitectureLayer pnnLayer = ArchitectureParse.ParseLayer(list[1], -1);
     ArchitectureLayer outputLayer = ArchitectureParse.ParseLayer(list[2], output);

     // The middle layer's name selects the output mode.
     PNNOutputMode outputMode;
     if (pnnLayer.Name.Equals("c", StringComparison.InvariantCultureIgnoreCase))
     {
         outputMode = PNNOutputMode.Classification;
     }
     else if (pnnLayer.Name.Equals("r", StringComparison.InvariantCultureIgnoreCase))
     {
         outputMode = PNNOutputMode.Regression;
     }
     else if (pnnLayer.Name.Equals("u", StringComparison.InvariantCultureIgnoreCase))
     {
         outputMode = PNNOutputMode.Unsupervised;
     }
     else
     {
         throw new NeuralNetworkError("Unknown model: " + pnnLayer.Name);
     }

     // The optional KERNEL parameter selects the kernel; gaussian is the default.
     ParamsHolder holder = new ParamsHolder(pnnLayer.Params);
     string kernelStr = holder.GetString("KERNEL", false, "gaussian");

     PNNKernelType kernel;
     if (kernelStr.Equals("gaussian", StringComparison.InvariantCultureIgnoreCase))
     {
         kernel = PNNKernelType.Gaussian;
     }
     else if (kernelStr.Equals("reciprocal", StringComparison.InvariantCultureIgnoreCase))
     {
         kernel = PNNKernelType.Reciprocal;
     }
     else
     {
         throw new NeuralNetworkError("Unknown kernel: " + kernelStr);
     }

     return new BasicPNN(kernel, outputMode, inputLayer.Count, outputLayer.Count);
 }
        /// <summary>
        /// Create a SVM search trainer that scans ranges of gamma and C values.
        /// </summary>
        ///
        /// <param name="method">The method to use; must be a SupportVectorMachine.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use; optional begin/end/step
        /// values for gamma and C, defaulting to the SVMSearchTrain constants.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                              IMLDataSet training, String argsStr)
        {
            if (!(method is SupportVectorMachine))
            {
                throw new EncogError(
                    "SVM Train training cannot be used on a method of type: "
                    + method.GetType().FullName);
            }

            IDictionary<String, String> args = ArchitectureParse.ParseParams(argsStr);
            var holder = new ParamsHolder(args);
            double gammaStart = holder.GetDouble(
                PropertyGamma1, false,
                SVMSearchTrain.DefaultGammaBegin);
            double cStart = holder.GetDouble(PropertyC1,
                                             false, SVMSearchTrain.DefaultConstBegin);
            double gammaStop = holder.GetDouble(
                PropertyGamma2, false,
                SVMSearchTrain.DefaultGammaEnd);
            double cStop = holder.GetDouble(PropertyC2,
                                            false, SVMSearchTrain.DefaultConstEnd);
            double gammaStep = holder.GetDouble(
                PropertyGammaStep, false,
                SVMSearchTrain.DefaultGammaStep);
            double cStep = holder.GetDouble(PropertyCStep,
                                            false, SVMSearchTrain.DefaultConstStep);

            // Configure the search ranges for gamma and the C constant.
            var result = new SVMSearchTrain((SupportVectorMachine) method, training)
                             {
                                 GammaBegin = gammaStart,
                                 GammaEnd = gammaStop,
                                 GammaStep = gammaStep,
                                 ConstBegin = cStart,
                                 ConstEnd = cStop,
                                 ConstStep = cStep
                             };

            return result;
        }
Пример #28
0
 /// <summary>
 /// Create a SVM search trainer that scans ranges of gamma and C values.
 /// </summary>
 /// <param name="method">The method to train; must be a SupportVectorMachine.</param>
 /// <param name="training">The training data to use.</param>
 /// <param name="argsStr">The arguments: GAMMA1/GAMMA2/GAMMASTEP and
 /// C1/C2/CSTEP define the search ranges.</param>
 /// <returns>The newly created trainer.</returns>
 public IMLTrain Create(IMLMethod method, IMLDataSet training, string argsStr)
 {
     // NOTE(review): the previous body was decompiler output; its gotos and
     // always-true/false unsigned-arithmetic guards reduce to the
     // straight-line logic below (identical to the structured SVM factory).
     if (!(method is SupportVectorMachine))
     {
         throw new EncogError("SVM Train training cannot be used on a method of type: " + method.GetType().FullName);
     }

     IDictionary<string, string> theParams = ArchitectureParse.ParseParams(argsStr);
     ParamsHolder holder = new ParamsHolder(theParams);

     // Read the search ranges in the same order as the original code.
     double gammaBegin = holder.GetDouble("GAMMA1", false, -10.0);
     double constBegin = holder.GetDouble("C1", false, -5.0);
     double gammaEnd = holder.GetDouble("GAMMA2", false, 10.0);
     double constEnd = holder.GetDouble("C2", false, 15.0);
     double gammaStep = holder.GetDouble("GAMMASTEP", false, 1.0);
     double constStep = holder.GetDouble("CSTEP", false, 2.0);

     return new SVMSearchTrain((SupportVectorMachine) method, training) {
         GammaBegin = gammaBegin,
         GammaEnd = gammaEnd,
         GammaStep = gammaStep,
         ConstBegin = constBegin,
         ConstEnd = constEnd,
         ConstStep = constStep
     };
 }