Example #1
        /// <summary>
        /// Create an SVM trainer.
        /// </summary>
        ///
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                               IMLDataSet training, String argsStr)
        {
            if (!(method is SupportVectorMachine))
            {
                throw new EncogError(
                          "SVM Train training cannot be used on a method of type: "
                          + method.GetType().FullName);
            }

            double defaultGamma = 1.0d / ((SupportVectorMachine)method).InputCount;
            double defaultC     = 1.0d;

            IDictionary <String, String> args = ArchitectureParse.ParseParams(argsStr);
            var    holder = new ParamsHolder(args);
            double gamma  = holder.GetDouble(MLTrainFactory.PropertyGamma,
                                             false, defaultGamma);
            double c = holder.GetDouble(MLTrainFactory.PropertyC, false,
                                        defaultC);

            var result = new SVMTrain((SupportVectorMachine)method, training);

            result.Gamma = gamma;
            result.C     = c;
            return(result);
        }
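For context, the argsStr parameter in these factories is presumably a comma-separated key=value list that ArchitectureParse.ParseParams turns into the dictionary handed to ParamsHolder. A minimal sketch of that round trip, using only the calls shown above; the literal key spellings ("GAMMA", "C") behind the MLTrainFactory property constants are an assumption, and namespaces are omitted as in the examples:

            // Hypothetical call site mirroring the parsing path above.
            IDictionary<String, String> args =
                ArchitectureParse.ParseParams("GAMMA=0.5,C=2");
            var holder = new ParamsHolder(args);

            double gamma = holder.GetDouble("GAMMA", false, 1.0); // -> 0.5
            double c     = holder.GetDouble("C", false, 1.0);     // -> 2.0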
Example #2
        /// <summary>
        /// Create a NEAT population.
        /// </summary>
        /// <param name="architecture">The architecture string to use.</param>
        /// <param name="input">The input count.</param>
        /// <param name="output">The output count.</param>
        /// <returns>The population.</returns>
        public IMLMethod Create(String architecture, int input,
                                int output)
        {
            if (input <= 0)
            {
                throw new EncogError("Must have at least one input for NEAT.");
            }

            if (output <= 0)
            {
                throw new EncogError("Must have at least one output for NEAT.");
            }

            IDictionary <String, String> args = ArchitectureParse.ParseParams(architecture);
            ParamsHolder holder = new ParamsHolder(args);

            int populationSize = holder.GetInt(
                MLMethodFactory.PropertyPopulationSize, false, 1000);

            int cycles = holder.GetInt(
                MLMethodFactory.PropertyCycles, false, NEATPopulation.DefaultCycles);

            IActivationFunction af = this.factory.Create(
                holder.GetString(MLMethodFactory.PropertyAF, false, MLActivationFactory.AF_SSIGMOID));

            NEATPopulation pop = new NEATPopulation(input, output, populationSize);

            pop.Reset();
            pop.ActivationCycles       = cycles;
            pop.NEATActivationFunction = af;

            return(pop);
        }
Example #3
        /// <summary>
        /// Create a genetic algorithm trainer.
        /// </summary>
        ///
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                               IMLDataSet training, String argsStr)
        {
            if (!(method is BasicNetwork))
            {
                throw new TrainingError(
                          "Invalid method type, requires BasicNetwork");
            }

            ICalculateScore score = new TrainingSetScore(training);

            IDictionary <String, String> args = ArchitectureParse.ParseParams(argsStr);
            var holder         = new ParamsHolder(args);
            int populationSize = holder.GetInt(
                MLTrainFactory.PropertyPopulationSize, false, 5000);
            double mutation = holder.GetDouble(
                MLTrainFactory.PropertyMutation, false, 0.1d);
            double mate = holder.GetDouble(MLTrainFactory.PropertyMate,
                                           false, 0.25d);

            IMLTrain train = new NeuralGeneticAlgorithm((BasicNetwork)method,
                                                        new RangeRandomizer(-1, 1), score, populationSize, mutation,
                                                        mate);

            return(train);
        }
Example #4
        /// <summary>
        /// Create a genetic algorithm trainer.
        /// </summary>
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                               IMLDataSet training, String argsStr)
        {
            if (!(method is BasicNetwork))
            {
                throw new TrainingError(
                          "Invalid method type, requires BasicNetwork");
            }

            ICalculateScore score = new TrainingSetScore(training);

            IDictionary <String, String> args = ArchitectureParse.ParseParams(argsStr);
            var holder         = new ParamsHolder(args);
            int populationSize = holder.GetInt(
                MLTrainFactory.PropertyPopulationSize, false, 5000);

            IMLTrain train = new MLMethodGeneticAlgorithm(() =>
            {
                IMLMethod result = (IMLMethod)ObjectCloner.DeepCopy(method);
                ((IMLResettable)result).Reset();
                return(result);
            }, score, populationSize);

            return(train);
        }
Example #5
        /// <summary>
        /// Create an annealing trainer.
        /// </summary>
        ///
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                               IMLDataSet training, String argsStr)
        {
            if (!(method is BasicNetwork))
            {
                throw new TrainingError(
                          "Invalid method type, requires BasicNetwork");
            }

            ICalculateScore score = new TrainingSetScore(training);

            IDictionary <String, String> args = ArchitectureParse.ParseParams(argsStr);
            var    holder    = new ParamsHolder(args);
            double startTemp = holder.GetDouble(
                MLTrainFactory.PropertyTemperatureStart, false, 10);
            double stopTemp = holder.GetDouble(
                MLTrainFactory.PropertyTemperatureStop, false, 2);

            int cycles = holder.GetInt(MLTrainFactory.Cycles, false, 100);

            IMLTrain train = new NeuralSimulatedAnnealing(
                (BasicNetwork)method, score, startTemp, stopTemp, cycles);

            return(train);
        }
Example #6
        private void SetDisplay(string tabIndex)
        {
            switch (tabIndex)
            {
            case "0":
                tcExcelParams.Visible = false;
                tcDynamParams.Width   = 1050;
                ParamsHolder.ShowAllParamsNotInRoot(true);

                break;

            case "2":
                tcExcelParams.Visible = false;
                tcDynamParams.Width   = 1050;
                ParamsHolder.ShowAllParamsNotInRoot(true);

                break;

            case "1":
                tcExcelParams.Visible       = true;
                tcExcelParams.Width         = 350;
                tcExcelParams.VerticalAlign = VerticalAlign.Top;
                switch (ExportType)
                {
                case 1:         //groupings only
                    ParamsHolder.ShowAllParamsNotInRoot(false);
                    DynamicReportParametersControl.HideDatePicker = false;
                    tcDynamParams.Width = 700;
                    break;

                case 2:         //filtering site only
                    ParamsHolder.ShowAllParamsInBranch(2, false);
                    DynamicReportParametersControl.HideDatePicker = false;
                    tcDynamParams.Width = 700;
                    break;

                case 3:         //grouping site only, filtering fleet only
                    ParamsHolder.ShowAllParamsInBranch(1, false);
                    DynamicReportParametersControl.HideDatePicker = false;
                    tcDynamParams.Width = 700;
                    break;

                case 4:         //groupings only, no date, no chart
                    ParamsHolder.ShowAllParamsNotInRoot(false);
                    DynamicReportParametersControl.HideDatePicker = true;
                    tcDynamParams.Width = 200;

                    break;

                default:        //filtering only, no change required
                    tcExcelParams.VerticalAlign = VerticalAlign.Middle;
                    tcDynamParams.Width         = 700;
                    break;
                }

                break;
            }
        }
Example #7
        /// <summary>
        /// Create an RBF network.
        /// </summary>
        ///
        /// <param name="architecture">THe architecture string to use.</param>
        /// <param name="input">The input count.</param>
        /// <param name="output">The output count.</param>
        /// <returns>The RBF network.</returns>
        public IMLMethod Create(String architecture, int input,
                                int output)
        {
            IList <String> layers = ArchitectureParse.ParseLayers(architecture);

            if (layers.Count != MaxLayers)
            {
                throw new EncogError(
                          "RBF Networks must have exactly three elements, "
                          + "separated by ->.");
            }

            ArchitectureLayer inputLayer = ArchitectureParse.ParseLayer(
                layers[0], input);
            ArchitectureLayer rbfLayer = ArchitectureParse.ParseLayer(
                layers[1], -1);
            ArchitectureLayer outputLayer = ArchitectureParse.ParseLayer(
                layers[2], output);

            int inputCount  = inputLayer.Count;
            int outputCount = outputLayer.Count;

            RBFEnum t;

            if (rbfLayer.Name.Equals("Gaussian", StringComparison.InvariantCultureIgnoreCase))
            {
                t = RBFEnum.Gaussian;
            }
            else if (rbfLayer.Name.Equals("Multiquadric", StringComparison.InvariantCultureIgnoreCase))
            {
                t = RBFEnum.Multiquadric;
            }
            else if (rbfLayer.Name.Equals("InverseMultiquadric", StringComparison.InvariantCultureIgnoreCase))
            {
                t = RBFEnum.InverseMultiquadric;
            }
            else if (rbfLayer.Name.Equals("MexicanHat", StringComparison.InvariantCultureIgnoreCase))
            {
                t = RBFEnum.MexicanHat;
            }
            else
            {
                throw new NeuralNetworkError("Unknown RBF: " + rbfLayer.Name);
            }

            var holder = new ParamsHolder(rbfLayer.Params);

            int rbfCount = holder.GetInt("C", true, 0);

            var result = new RBFNetwork(inputCount, rbfCount,
                                        outputCount, t);

            return(result);
        }
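Reading the parse logic above, the architecture string appears to be three tokens joined by "->", with the middle token naming the RBF kernel and carrying the RBF count as parameter C. A hedged sketch of a call; the "?" placeholder and the RBFNetworkFactory host type are assumptions, not confirmed by the snippet:

            // Assumed format: <input> -> <RBF name>(C=<count>) -> <output>,
            // where "?" defers to the input/output counts passed to Create.
            var factory = new RBFNetworkFactory();
            IMLMethod network = factory.Create("?->GAUSSIAN(C=4)->?", 10, 2);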
Example #8
        /// <summary>
        /// Create a Nelder-Mead trainer.
        /// </summary>
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                               IMLDataSet training, String argsStr)
        {
            IDictionary <String, String> args = ArchitectureParse.ParseParams(argsStr);
            var holder = new ParamsHolder(args);

            //final double learningRate = holder.getDouble(
            //		MLTrainFactory.PROPERTY_LEARNING_RATE, false, 0.1);

            return(new NelderMeadTraining((BasicNetwork)method, training));
        }
Example #9
        /// <summary>
        /// Create a quick propagation trainer.
        /// </summary>
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                               IMLDataSet training, String argsStr)
        {
            IDictionary <String, String> args = ArchitectureParse.ParseParams(argsStr);
            var holder = new ParamsHolder(args);

            double learningRate = holder.GetDouble(
                MLTrainFactory.PropertyLearningRate, false, 2.0);

            return(new QuickPropagation((BasicNetwork)method, training, learningRate));
        }
Example #10
        public Program()
        {
            var logger = new Logger();

            logger.AddSource(new ConsoleLogSource());
            logger.CurrentLevel = LogLevel.Debug;

            _logger = logger;
            _connectionSignalsHandlerLazy = new Lazy <IConnectionSignalsHandler>(() => new NullConnectionSignalsHandler(_logger));

            var baseUrl = "https://remotecamera.azurewebsites.net/";

            _remoteCameraService = new RemoteCameraService(new NullConnectionSignalsHandler(_logger), new HubService(new HubClient(baseUrl + "/hub", _logger)), new SessionClient(baseUrl), _logger);
            _paramsHolder        = new ParamsHolder();
            InitializeBranches(_remoteCameraService);
        }
Example #11
        /// <summary>
        /// Create a PSO trainer.
        /// </summary>
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                               IMLDataSet training, String argsStr)
        {
            IDictionary <String, String> args = ArchitectureParse.ParseParams(argsStr);
            ParamsHolder holder = new ParamsHolder(args);

            int particles = holder.GetInt(
                MLTrainFactory.PropertyParticles, false, 20);

            ICalculateScore score      = new TrainingSetScore(training);
            IRandomizer     randomizer = new NguyenWidrowRandomizer();

            IMLTrain train = new NeuralPSO((BasicNetwork)method, randomizer, score, particles);

            return(train);
        }
Example #12
        /// <summary>
        /// Create an LMA trainer.
        /// </summary>
        ///
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                              IMLDataSet training, String argsStr)
        {
            if (!(method is BasicNetwork))
            {
                throw new EncogError(
                    "LMA training cannot be used on a method of type: "
                    + method.GetType().FullName);
            }

            IDictionary<String, String> args = ArchitectureParse.ParseParams(argsStr);
            var holder = new ParamsHolder(args);

            var result = new LevenbergMarquardtTraining(
                (BasicNetwork) method, training);
            return result;
        }
Example #13
        /// <summary>
        /// Create an EPL program population.
        /// </summary>
        /// <param name="architecture">The architecture string to use.</param>
        /// <param name="input">The input count.</param>
        /// <param name="output">The output count.</param>
        /// <returns>The program population.</returns>
        public IMLMethod Create(String architecture, int input,
                                int output)
        {
            if (input <= 0)
            {
                throw new EncogError("Must have at least one input for EPL.");
            }

            if (output <= 0)
            {
                throw new EncogError("Must have at least one output for EPL.");
            }


            IDictionary <String, String> args = ArchitectureParse.ParseParams(architecture);
            var holder = new ParamsHolder(args);

            int populationSize = holder.GetInt(
                MLMethodFactory.PropertyPopulationSize, false, 1000);
            String variables = holder.GetString("vars", false, "x");
            String funct     = holder.GetString("funct", false, null);

            var context = new EncogProgramContext();

            string[] tok = variables.Split(',');
            foreach (string v in tok)
            {
                context.DefineVariable(v);
            }

            if (String.Compare("numeric", funct, StringComparison.OrdinalIgnoreCase) == 0)
            {
                StandardExtensions.CreateNumericOperators(context);
            }

            var pop = new PrgPopulation(context, populationSize);

            if (context.Functions.Count > 0)
            {
                (new RampedHalfAndHalf(context, 2, 6)).Generate(new EncogRandom(), pop);
            }
            return(pop);
        }
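A hedged sketch of how this factory might be invoked; "vars" and "funct" are the literal keys read above, while the EPLFactory host type is an assumption:

            // Hypothetical call: one variable, standard numeric operators.
            var factory = new EPLFactory();
            IMLMethod pop = factory.Create("funct=numeric,vars=x", 1, 1);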
        /// <summary>
        /// Create an SVM search trainer.
        /// </summary>
        ///
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                               IMLDataSet training, String argsStr)
        {
            if (!(method is SupportVectorMachine))
            {
                throw new EncogError(
                          "SVM Train training cannot be used on a method of type: "
                          + method.GetType().FullName);
            }

            IDictionary <String, String> args = ArchitectureParse.ParseParams(argsStr);

            var    holder     = new ParamsHolder(args);
            double gammaStart = holder.GetDouble(
                PropertyGamma1, false,
                SVMSearchTrain.DefaultGammaBegin);
            double cStart = holder.GetDouble(PropertyC1,
                                             false, SVMSearchTrain.DefaultConstBegin);
            double gammaStop = holder.GetDouble(
                PropertyGamma2, false,
                SVMSearchTrain.DefaultGammaEnd);
            double cStop = holder.GetDouble(PropertyC2,
                                            false, SVMSearchTrain.DefaultConstEnd);
            double gammaStep = holder.GetDouble(
                PropertyGammaStep, false,
                SVMSearchTrain.DefaultGammaStep);
            double cStep = holder.GetDouble(PropertyCStep,
                                            false, SVMSearchTrain.DefaultConstStep);

            var result = new SVMSearchTrain((SupportVectorMachine)method, training)
            {
                GammaBegin = gammaStart,
                GammaEnd   = gammaStop,
                GammaStep  = gammaStep,
                ConstBegin = cStart,
                ConstEnd   = cStop,
                ConstStep  = cStep
            };

            return(result);
        }
        private void MenuFileOpen_Click(object sender, RoutedEventArgs e)
        {
            OpenFileDialog dlg = new OpenFileDialog();

            dlg.DefaultExt = ".eg";                       // Default file extension
            dlg.Filter     = "Encog EG Files (.EG)|*.eg"; // Filter files by extension

            Nullable <bool> result = dlg.ShowDialog();

            if (result == true)
            {
                FileInfo inf = new FileInfo(dlg.FileName);
                if (inf.Directory != null)
                {
                    var tempn = Encog.Util.NetworkUtil.NetworkUtility.LoadNetwork(inf.Directory.ToString(), dlg.FileName);

                    Network = tempn;
                }



                if (Network == null)
                {
                    MessageBox.Show("This does not appear to be an EG file created for this example.");
                    return;
                }


                this.Util = new GatherUtil();
                ParamsHolder xpa = new ParamsHolder(Network.Properties);

                this.Util.EvalWindow    = xpa.GetInt("eval", true, 1);
                this.Util.PredictWindow = xpa.GetInt("predict", true, 1);

                // this.Util.EvalWindow = Convert.ToInt16(Network.Properties["eval"]);
            }
        }
        /// <summary>
        /// Create an RPROP trainer.
        /// </summary>
        ///
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                               IMLDataSet training, String argsStr)
        {
            if (!(method is IContainsFlat))
            {
                throw new EncogError(
                          "RPROP training cannot be used on a method of type: "
                          + method.GetType().FullName);
            }

            IDictionary <String, String> args = ArchitectureParse.ParseParams(argsStr);
            var    holder        = new ParamsHolder(args);
            double initialUpdate = holder.GetDouble(
                MLTrainFactory.PropertyInitialUpdate, false,
                RPROPConst.DefaultInitialUpdate);
            double maxStep = holder.GetDouble(
                MLTrainFactory.PropertyMaxStep, false,
                RPROPConst.DefaultMaxStep);

            return(new ResilientPropagation((IContainsFlat)method, training,
                                            initialUpdate, maxStep));
        }
        /// <summary>
        /// Create an LMA trainer.
        /// </summary>
        ///
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                               IMLDataSet training, String argsStr)
        {
            if (!(method is BasicNetwork))
            {
                throw new EncogError(
                          "LMA training cannot be used on a method of type: "
                          + method.GetType().FullName);
            }

            IDictionary <String, String> args = ArchitectureParse.ParseParams(argsStr);
            var  holder = new ParamsHolder(args);
            bool useReg = holder.GetBoolean(
                MLTrainFactory.PropertyBayesianRegularization, false, false);

            var result = new LevenbergMarquardtTraining(
                (BasicNetwork)method, training)
            {
                UseBayesianRegularization = useReg
            };

            return(result);
        }
        private void MenuFileOpenClick(object sender, RoutedEventArgs e)
        {
            var dlg = new OpenFileDialog {
                DefaultExt = ".eg", Filter = "Encog EG Files (.EG)|*.eg"
            };

            bool? result = dlg.ShowDialog();

            if (result != true)
            {
                return;
            }
            var inf = new FileInfo(dlg.FileName);

            if (inf.Directory != null)
            {
                BasicNetwork tempn = NetworkUtility.LoadNetwork(inf.Directory.ToString(), dlg.FileName);

                Network = tempn;
            }

            if (Network == null)
            {
                MessageBox.Show("This does not appear to be an EG file created for this example.");
                return;
            }


            Util = new GatherUtil();
            var xpa = new ParamsHolder(Network.Properties);

            Util.EvalWindow    = xpa.GetInt("eval", true, 1);
            Util.PredictWindow = xpa.GetInt("predict", true, 1);
        }
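For the GetInt calls above to succeed with required = true, the save side presumably writes the same keys into the network's property map. A sketch of that counterpart; the locals (network, util, path) are hypothetical and the persistence call is an assumed Encog API, shown only to complete the round trip:

            // Hypothetical save-side counterpart: store the window sizes as
            // strings so ParamsHolder.GetInt can read them back on load.
            network.Properties["eval"]    = util.EvalWindow.ToString();
            network.Properties["predict"] = util.PredictWindow.ToString();
            EncogDirectoryPersistence.SaveObject(new FileInfo(path), network);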
        /// <summary>
        /// Create a PNN network.
        /// </summary>
        ///
        /// <param name="architecture">THe architecture string to use.</param>
        /// <param name="input">The input count.</param>
        /// <param name="output">The output count.</param>
        /// <returns>The PNN network.</returns>
        public IMLMethod Create(String architecture, int input,
                                int output)
        {
            IList <String> layers = ArchitectureParse.ParseLayers(architecture);

            if (layers.Count != MaxLayers)
            {
                throw new EncogError(
                          "PNN Networks must have exactly three elements, "
                          + "separated by ->.");
            }

            ArchitectureLayer inputLayer = ArchitectureParse.ParseLayer(
                layers[0], input);
            ArchitectureLayer pnnLayer = ArchitectureParse.ParseLayer(
                layers[1], -1);
            ArchitectureLayer outputLayer = ArchitectureParse.ParseLayer(
                layers[2], output);

            int inputCount  = inputLayer.Count;
            int outputCount = outputLayer.Count;

            PNNKernelType kernel;
            PNNOutputMode outmodel;

            if (pnnLayer.Name.Equals("c", StringComparison.InvariantCultureIgnoreCase))
            {
                outmodel = PNNOutputMode.Classification;
            }
            else if (pnnLayer.Name.Equals("r", StringComparison.InvariantCultureIgnoreCase))
            {
                outmodel = PNNOutputMode.Regression;
            }
            else if (pnnLayer.Name.Equals("u", StringComparison.InvariantCultureIgnoreCase))
            {
                outmodel = PNNOutputMode.Unsupervised;
            }
            else
            {
                throw new NeuralNetworkError("Unknown model: " + pnnLayer.Name);
            }

            var holder = new ParamsHolder(pnnLayer.Params);

            String kernelStr = holder.GetString("KERNEL", false, "gaussian");

            if (kernelStr.Equals("gaussian", StringComparison.InvariantCultureIgnoreCase))
            {
                kernel = PNNKernelType.Gaussian;
            }
            else if (kernelStr.Equals("reciprocal", StringComparison.InvariantCultureIgnoreCase))
            {
                kernel = PNNKernelType.Reciprocal;
            }
            else
            {
                throw new NeuralNetworkError("Unknown kernel: " + kernelStr);
            }

            var result = new BasicPNN(kernel, outmodel, inputCount,
                                      outputCount);

            return(result);
        }
        /// <summary>
        /// Create a K2 trainer.
        /// </summary>
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                               IMLDataSet training, String argsStr)
        {
            IDictionary <String, String> args = ArchitectureParse.ParseParams(argsStr);
            ParamsHolder holder = new ParamsHolder(args);

            int maxParents = holder.GetInt(
                MLTrainFactory.PropertyMaxParents, false, 1);
            String searchStr    = holder.GetString("SEARCH", false, "k2");
            String estimatorStr = holder.GetString("ESTIMATOR", false, "simple");
            String initStr      = holder.GetString("INIT", false, "naive");

            IBayesSearch    search;
            IBayesEstimator estimator;
            BayesianInit    init;

            if (string.Compare(searchStr, "k2", true) == 0)
            {
                search = new SearchK2();
            }
            else if (string.Compare(searchStr, "none", true) == 0)
            {
                search = new SearchNone();
            }
            else
            {
                throw new BayesianError("Invalid search type: " + searchStr);
            }

            if (string.Compare(estimatorStr, "simple", true) == 0)
            {
                estimator = new SimpleEstimator();
            }
            else if (string.Compare(estimatorStr, "none", true) == 0)
            {
                estimator = new EstimatorNone();
            }
            else
            {
                throw new BayesianError("Invalid estimator type: " + estimatorStr);
            }

            if (string.Compare(initStr, "simple") == 0)
            {
                init = BayesianInit.InitEmpty;
            }
            else if (string.Compare(initStr, "naive") == 0)
            {
                init = BayesianInit.InitNaiveBayes;
            }
            else if (string.Compare(initStr, "none") == 0)
            {
                init = BayesianInit.InitNoChange;
            }
            else
            {
                throw new BayesianError("Invalid init type: " + initStr);
            }

            return(new TrainBayesian((BayesianNetwork)method, training, maxParents, init, search, estimator));
        }
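A hedged sketch of an args string that exercises the branches above; "SEARCH", "ESTIMATOR" and "INIT" are the literal keys from the snippet, while "MAXPARENTS" is an assumed spelling of MLTrainFactory.PropertyMaxParents:

            // Hypothetical args string covering every option read above.
            IDictionary<String, String> args = ArchitectureParse.ParseParams(
                "MAXPARENTS=2,SEARCH=k2,ESTIMATOR=simple,INIT=naive");
            var holder = new ParamsHolder(args);

            int    maxParents = holder.GetInt("MAXPARENTS", false, 1);    // -> 2
            String searchStr  = holder.GetString("SEARCH", false, "k2");  // -> "k2"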
        /// <summary>
        /// Create a neighborhood SOM trainer.
        /// </summary>
        ///
        /// <param name="method">The method to use.</param>
        /// <param name="training">The training data to use.</param>
        /// <param name="argsStr">The arguments to use.</param>
        /// <returns>The newly created trainer.</returns>
        public IMLTrain Create(IMLMethod method,
                               IMLDataSet training, String argsStr)
        {
            if (!(method is SOMNetwork))
            {
                throw new EncogError(
                          "Neighborhood training cannot be used on a method of type: "
                          + method.GetType().FullName);
            }

            IDictionary <String, String> args = ArchitectureParse.ParseParams(argsStr);
            var holder = new ParamsHolder(args);

            double learningRate = holder.GetDouble(
                MLTrainFactory.PropertyLearningRate, false, 0.7d);
            String neighborhoodStr = holder.GetString(
                MLTrainFactory.PropertyNeighborhood, false, "rbf");
            String rbfTypeStr = holder.GetString(
                MLTrainFactory.PropertyRBFType, false, "gaussian");

            RBFEnum t;

            if (rbfTypeStr.Equals("Gaussian", StringComparison.InvariantCultureIgnoreCase))
            {
                t = RBFEnum.Gaussian;
            }
            else if (rbfTypeStr.Equals("Multiquadric", StringComparison.InvariantCultureIgnoreCase))
            {
                t = RBFEnum.Multiquadric;
            }
            else if (rbfTypeStr.Equals("InverseMultiquadric", StringComparison.InvariantCultureIgnoreCase))
            {
                t = RBFEnum.InverseMultiquadric;
            }
            else if (rbfTypeStr.Equals("MexicanHat", StringComparison.InvariantCultureIgnoreCase))
            {
                t = RBFEnum.MexicanHat;
            }
            else
            {
                t = RBFEnum.Gaussian;
            }

            INeighborhoodFunction nf = null;

            if (neighborhoodStr.Equals("bubble", StringComparison.InvariantCultureIgnoreCase))
            {
                nf = new NeighborhoodBubble(1);
            }
            else if (neighborhoodStr.Equals("rbf", StringComparison.InvariantCultureIgnoreCase))
            {
                String str = holder.GetString(
                    MLTrainFactory.PropertyDimensions, true, null);
                int[] size = NumberList.FromListInt(CSVFormat.EgFormat, str);
                nf = new NeighborhoodRBF(size, t);
            }
            else if (neighborhoodStr.Equals("rbf1d", StringComparison.InvariantCultureIgnoreCase))
            {
                nf = new NeighborhoodRBF1D(t);
            }
            if (neighborhoodStr.Equals("single", StringComparison.InvariantCultureIgnoreCase))
            {
                nf = new NeighborhoodSingle();
            }

            var result = new BasicTrainSOM((SOMNetwork)method,
                                           learningRate, training, nf);

            if (args.ContainsKey(MLTrainFactory.PropertyIterations))
            {
                int plannedIterations = holder.GetInt(
                    MLTrainFactory.PropertyIterations, false, 1000);
                double startRate = holder.GetDouble(
                    MLTrainFactory.PropertyStartLearningRate, false, 0.05d);
                double endRate = holder.GetDouble(
                    MLTrainFactory.PropertyEndLearningRate, false, 0.05d);
                double startRadius = holder.GetDouble(
                    MLTrainFactory.PropertyStartRadius, false, 10);
                double endRadius = holder.GetDouble(
                    MLTrainFactory.PropertyEndRadius, false, 1);
                result.SetAutoDecay(plannedIterations, startRate, endRate,
                                    startRadius, endRadius);
            }

            return(result);
        }