/// <summary>
/// Create a NEAT population.
/// </summary>
/// <param name="architecture">The architecture string to use.</param>
/// <param name="input">The input count.</param>
/// <param name="output">The output count.</param>
/// <returns>The population.</returns>
public IMLMethod Create(String architecture, int input, int output)
{
    if (input <= 0)
    {
        throw new EncogError("Must have at least one input for NEAT.");
    }

    if (output <= 0)
    {
        throw new EncogError("Must have at least one output for NEAT.");
    }

    IDictionary<String, String> args = ArchitectureParse.ParseParams(architecture);
    ParamsHolder holder = new ParamsHolder(args);

    int populationSize = holder.GetInt(
        MLMethodFactory.PropertyPopulationSize, false, 1000);
    int cycles = holder.GetInt(
        MLMethodFactory.PropertyCycles, false, NEATPopulation.DefaultCycles);
    IActivationFunction af = this.factory.Create(
        holder.GetString(MLMethodFactory.PropertyAF, false,
            MLActivationFactory.AF_SSIGMOID));

    NEATPopulation pop = new NEATPopulation(input, output, populationSize);
    pop.Reset();
    pop.ActivationCycles = cycles;
    pop.NEATActivationFunction = af;

    return pop;
}
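A minimal usage sketch for the factory above. It assumes Encog's usual entry point, MLMethodFactory.Create, dispatches a "neat" type key to this method; the type key literal, the empty architecture string, and the input/output counts are illustrative assumptions, not taken from the code above.

    var methodFactory = new MLMethodFactory();

    // An empty architecture string keeps the defaults shown in the factory:
    // population size 1000, NEATPopulation.DefaultCycles activation cycles,
    // and the SSIGMOID activation function.
    var pop = (NEATPopulation)methodFactory.Create(
        "neat",   // type key assumed to match Encog's NEAT constant
        "",       // no parameter overrides
        2,        // input count
        1);       // output count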
/// <summary>
/// Create an EPL (Encog Programming Language) program population.
/// </summary>
/// <param name="architecture">The architecture string to use.</param>
/// <param name="input">The input count.</param>
/// <param name="output">The output count.</param>
/// <returns>The program population.</returns>
public IMLMethod Create(String architecture, int input, int output)
{
    if (input <= 0)
    {
        throw new EncogError("Must have at least one input for EPL.");
    }

    if (output <= 0)
    {
        throw new EncogError("Must have at least one output for EPL.");
    }

    IDictionary<String, String> args = ArchitectureParse.ParseParams(architecture);
    var holder = new ParamsHolder(args);

    int populationSize = holder.GetInt(
        MLMethodFactory.PropertyPopulationSize, false, 1000);
    String variables = holder.GetString("vars", false, "x");
    String funct = holder.GetString("funct", false, null);

    var context = new EncogProgramContext();
    string[] tok = variables.Split(',');
    foreach (string v in tok)
    {
        context.DefineVariable(v);
    }

    if (String.Compare("numeric", funct, StringComparison.OrdinalIgnoreCase) == 0)
    {
        StandardExtensions.CreateNumericOperators(context);
    }

    var pop = new PrgPopulation(context, populationSize);

    if (context.Functions.Count > 0)
    {
        (new RampedHalfAndHalf(context, 2, 6)).Generate(new EncogRandom(), pop);
    }

    return pop;
}
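A usage sketch for the EPL factory, again assuming MLMethodFactory routes an "epl" type key to the method above; the type key literal and the exact architecture-string grammar accepted by ArchitectureParse are assumptions.

    var methodFactory = new MLMethodFactory();

    // "funct=numeric" asks the factory to register the standard numeric
    // operators, which is what triggers the ramped half-and-half generation
    // of an initial program population. "vars" is omitted, so the single
    // default variable "x" is defined.
    var pop = (PrgPopulation)methodFactory.Create(
        "epl",             // type key assumed for the EPL factory
        "funct=numeric",
        1, 1);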
/// <summary>
/// Create a Bayesian network trainer (K2 search by default).
/// </summary>
/// <param name="method">The method to use.</param>
/// <param name="training">The training data to use.</param>
/// <param name="argsStr">The arguments to use.</param>
/// <returns>The newly created trainer.</returns>
public IMLTrain Create(IMLMethod method, IMLDataSet training, String argsStr)
{
    IDictionary<String, String> args = ArchitectureParse.ParseParams(argsStr);
    ParamsHolder holder = new ParamsHolder(args);

    int maxParents = holder.GetInt(
        MLTrainFactory.PropertyMaxParents, false, 1);
    String searchStr = holder.GetString("SEARCH", false, "k2");
    String estimatorStr = holder.GetString("ESTIMATOR", false, "simple");
    String initStr = holder.GetString("INIT", false, "naive");

    IBayesSearch search;
    IBayesEstimator estimator;
    BayesianInit init;

    if (string.Compare(searchStr, "k2", true) == 0)
    {
        search = new SearchK2();
    }
    else if (string.Compare(searchStr, "none", true) == 0)
    {
        search = new SearchNone();
    }
    else
    {
        throw new BayesianError("Invalid search type: " + searchStr);
    }

    if (string.Compare(estimatorStr, "simple", true) == 0)
    {
        estimator = new SimpleEstimator();
    }
    else if (string.Compare(estimatorStr, "none", true) == 0)
    {
        estimator = new EstimatorNone();
    }
    else
    {
        throw new BayesianError("Invalid estimator type: " + estimatorStr);
    }

    if (string.Compare(initStr, "simple", true) == 0)
    {
        init = BayesianInit.InitEmpty;
    }
    else if (string.Compare(initStr, "naive", true) == 0)
    {
        init = BayesianInit.InitNaiveBayes;
    }
    else if (string.Compare(initStr, "none", true) == 0)
    {
        init = BayesianInit.InitNoChange;
    }
    else
    {
        throw new BayesianError("Invalid init type: " + initStr);
    }

    return new TrainBayesian((BayesianNetwork)method, training, maxParents,
        init, search, estimator);
}
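A sketch of how this trainer might be obtained. The bayesianFactory instance is hypothetical (standing in for the class that declares the Create method above), and the data arrays are placeholders; the args keys reuse the same constant and literals the method reads.

    // Placeholder training data; real data would come from the problem domain.
    double[][] inputData = { new[] { 1.0, 0.0 }, new[] { 0.0, 1.0 } };
    double[][] idealData = { new[] { 1.0 }, new[] { 0.0 } };
    IMLDataSet training = new BasicMLDataSet(inputData, idealData);

    var network = new BayesianNetwork();

    // Build the args string from the same keys the factory reads.
    String args = MLTrainFactory.PropertyMaxParents + "=2,"
                + "SEARCH=k2,ESTIMATOR=simple,INIT=naive";

    // bayesianFactory: hypothetical instance of the declaring factory class.
    IMLTrain trainer = bayesianFactory.Create(network, training, args);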
/// <summary>
/// Create a SOM neighborhood trainer.
/// </summary>
/// <param name="method">The method to use.</param>
/// <param name="training">The training data to use.</param>
/// <param name="argsStr">The arguments to use.</param>
/// <returns>The newly created trainer.</returns>
public IMLTrain Create(IMLMethod method, IMLDataSet training, String argsStr)
{
    if (!(method is SOMNetwork))
    {
        throw new EncogError(
            "Neighborhood training cannot be used on a method of type: "
            + method.GetType().FullName);
    }

    IDictionary<String, String> args = ArchitectureParse.ParseParams(argsStr);
    var holder = new ParamsHolder(args);

    double learningRate = holder.GetDouble(
        MLTrainFactory.PropertyLearningRate, false, 0.7d);
    String neighborhoodStr = holder.GetString(
        MLTrainFactory.PropertyNeighborhood, false, "rbf");
    String rbfTypeStr = holder.GetString(
        MLTrainFactory.PropertyRBFType, false, "gaussian");

    RBFEnum t;
    if (rbfTypeStr.Equals("Gaussian", StringComparison.InvariantCultureIgnoreCase))
    {
        t = RBFEnum.Gaussian;
    }
    else if (rbfTypeStr.Equals("Multiquadric", StringComparison.InvariantCultureIgnoreCase))
    {
        t = RBFEnum.Multiquadric;
    }
    else if (rbfTypeStr.Equals("InverseMultiquadric", StringComparison.InvariantCultureIgnoreCase))
    {
        t = RBFEnum.InverseMultiquadric;
    }
    else if (rbfTypeStr.Equals("MexicanHat", StringComparison.InvariantCultureIgnoreCase))
    {
        t = RBFEnum.MexicanHat;
    }
    else
    {
        t = RBFEnum.Gaussian;
    }

    INeighborhoodFunction nf = null;
    if (neighborhoodStr.Equals("bubble", StringComparison.InvariantCultureIgnoreCase))
    {
        nf = new NeighborhoodBubble(1);
    }
    else if (neighborhoodStr.Equals("rbf", StringComparison.InvariantCultureIgnoreCase))
    {
        String str = holder.GetString(
            MLTrainFactory.PropertyDimensions, true, null);
        int[] size = NumberList.FromListInt(CSVFormat.EgFormat, str);
        nf = new NeighborhoodRBF(size, t);
    }
    else if (neighborhoodStr.Equals("rbf1d", StringComparison.InvariantCultureIgnoreCase))
    {
        nf = new NeighborhoodRBF1D(t);
    }
    else if (neighborhoodStr.Equals("single", StringComparison.InvariantCultureIgnoreCase))
    {
        nf = new NeighborhoodSingle();
    }

    var result = new BasicTrainSOM((SOMNetwork)method, learningRate, training, nf);

    if (args.ContainsKey(MLTrainFactory.PropertyIterations))
    {
        int plannedIterations = holder.GetInt(
            MLTrainFactory.PropertyIterations, false, 1000);
        double startRate = holder.GetDouble(
            MLTrainFactory.PropertyStartLearningRate, false, 0.05d);
        double endRate = holder.GetDouble(
            MLTrainFactory.PropertyEndLearningRate, false, 0.05d);
        double startRadius = holder.GetDouble(
            MLTrainFactory.PropertyStartRadius, false, 10);
        double endRadius = holder.GetDouble(
            MLTrainFactory.PropertyEndRadius, false, 1);
        result.SetAutoDecay(plannedIterations, startRate, endRate,
            startRadius, endRadius);
    }

    return result;
}
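A sketch for the SOM neighborhood trainer. The somFactory instance is hypothetical (standing in for the class that declares the Create method above), and the SOM dimensions and data are illustrative; the args keys reuse the same constants the method reads.

    // Placeholder unsupervised data for a 2-input SOM with 4 output neurons.
    double[][] somInput = { new[] { 0.0, 0.0 }, new[] { 1.0, 1.0 } };
    IMLDataSet training = new BasicMLDataSet(somInput, null);

    var som = new SOMNetwork(2, 4);

    // Request the "single" neighborhood so no RBF dimensions are required,
    // and override the learning rate.
    String args = MLTrainFactory.PropertyNeighborhood + "=single,"
                + MLTrainFactory.PropertyLearningRate + "=0.4";

    // somFactory: hypothetical instance of the declaring factory class.
    IMLTrain trainer = somFactory.Create(som, training, args);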
/// <summary>
/// Create a PNN network.
/// </summary>
/// <param name="architecture">The architecture string to use.</param>
/// <param name="input">The input count.</param>
/// <param name="output">The output count.</param>
/// <returns>The PNN network.</returns>
public IMLMethod Create(String architecture, int input, int output)
{
    IList<String> layers = ArchitectureParse.ParseLayers(architecture);
    if (layers.Count != MaxLayers)
    {
        throw new EncogError(
            "PNN networks must have exactly three elements, "
            + "separated by ->.");
    }

    ArchitectureLayer inputLayer = ArchitectureParse.ParseLayer(
        layers[0], input);
    ArchitectureLayer pnnLayer = ArchitectureParse.ParseLayer(
        layers[1], -1);
    ArchitectureLayer outputLayer = ArchitectureParse.ParseLayer(
        layers[2], output);

    int inputCount = inputLayer.Count;
    int outputCount = outputLayer.Count;

    PNNKernelType kernel;
    PNNOutputMode outmodel;

    if (pnnLayer.Name.Equals("c", StringComparison.InvariantCultureIgnoreCase))
    {
        outmodel = PNNOutputMode.Classification;
    }
    else if (pnnLayer.Name.Equals("r", StringComparison.InvariantCultureIgnoreCase))
    {
        outmodel = PNNOutputMode.Regression;
    }
    else if (pnnLayer.Name.Equals("u", StringComparison.InvariantCultureIgnoreCase))
    {
        outmodel = PNNOutputMode.Unsupervised;
    }
    else
    {
        throw new NeuralNetworkError("Unknown model: " + pnnLayer.Name);
    }

    var holder = new ParamsHolder(pnnLayer.Params);
    String kernelStr = holder.GetString("KERNEL", false, "gaussian");

    if (kernelStr.Equals("gaussian", StringComparison.InvariantCultureIgnoreCase))
    {
        kernel = PNNKernelType.Gaussian;
    }
    else if (kernelStr.Equals("reciprocal", StringComparison.InvariantCultureIgnoreCase))
    {
        kernel = PNNKernelType.Reciprocal;
    }
    else
    {
        throw new NeuralNetworkError("Unknown kernel: " + kernelStr);
    }

    var result = new BasicPNN(kernel, outmodel, inputCount, outputCount);
    return result;
}
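A usage sketch, assuming MLMethodFactory dispatches a "pnn" type key to the method above and that Encog's usual architecture grammar applies (three layer tokens separated by ->, with "?" standing in for the supplied input and output counts); the type key literal and the architecture string are assumptions.

    var methodFactory = new MLMethodFactory();

    // Classification PNN ("C") with the default gaussian kernel; "?" lets the
    // input and output counts below fill in the outer layers.
    var pnn = (BasicPNN)methodFactory.Create(
        "pnn",                       // type key assumed for the PNN factory
        "?->C(KERNEL=gaussian)->?",  // input -> classification PNN -> output
        4,                           // input count
        3);                          // output count (class count)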