public GetDouble ( String name, bool required, double defaultValue ) : double
name | String | The name of the double. |
required | bool | True if this value is required. |
defaultValue | double | The default value. |
Return value | double
/// <summary>
/// Create an annealing trainer.
/// </summary>
/// <param name="method">The method to use; must be a BasicNetwork.</param>
/// <param name="training">The training data to use.</param>
/// <param name="argsStr">The arguments to use.</param>
/// <returns>The newly created trainer.</returns>
public IMLTrain Create(IMLMethod method, IMLDataSet training,
                       String argsStr)
{
    if (!(method is BasicNetwork))
    {
        throw new TrainingError(
            "Invalid method type, requires BasicNetwork");
    }

    // Anneal against the error on the supplied training set.
    ICalculateScore score = new TrainingSetScore(training);

    var holder = new ParamsHolder(ArchitectureParse.ParseParams(argsStr));

    // Optional parameters; fall back to the documented defaults.
    double startTemp = holder.GetDouble(
        MLTrainFactory.PropertyTemperatureStart, false, 10);
    double stopTemp = holder.GetDouble(
        MLTrainFactory.PropertyTemperatureStop, false, 2);
    int cycles = holder.GetInt(MLTrainFactory.Cycles, false, 100);

    return new NeuralSimulatedAnnealing(
        (BasicNetwork) method, score, startTemp, stopTemp, cycles);
}
/// <summary>
/// Create a SVM trainer.
/// </summary>
/// <param name="method">The method to use; must be a SupportVectorMachine.</param>
/// <param name="training">The training data to use.</param>
/// <param name="argsStr">The arguments to use.</param>
/// <returns>The newly created trainer.</returns>
public IMLTrain Create(IMLMethod method, IMLDataSet training,
                       String argsStr)
{
    if (!(method is SupportVectorMachine))
    {
        throw new EncogError(
            "SVM Train training cannot be used on a method of type: "
            + method.GetType().FullName);
    }

    var svm = (SupportVectorMachine) method;

    // Default gamma scales inversely with the input count; C defaults to 1.
    double defaultGamma = 1.0d/svm.InputCount;
    double defaultC = 1.0d;

    var holder = new ParamsHolder(ArchitectureParse.ParseParams(argsStr));

    var result = new SVMTrain(svm, training)
    {
        Gamma = holder.GetDouble(MLTrainFactory.PropertyGamma,
                                 false, defaultGamma),
        C = holder.GetDouble(MLTrainFactory.PropertyC, false, defaultC)
    };
    return result;
}
/// <summary>
/// Create an annealing trainer.
/// </summary>
/// <param name="method">The method to use; must be a BasicNetwork.</param>
/// <param name="training">The training data to use.</param>
/// <param name="argsStr">The arguments to use.</param>
/// <returns>The newly created trainer.</returns>
public IMLTrain Create(IMLMethod method, IMLDataSet training,
                       String argsStr)
{
    if (!(method is BasicNetwork))
    {
        throw new TrainingError(
            "Invalid method type, requires BasicNetwork");
    }

    // Fitness is the error against the supplied training set.
    ICalculateScore score = new TrainingSetScore(training);

    var holder = new ParamsHolder(ArchitectureParse.ParseParams(argsStr));

    // Optional GA parameters with their documented defaults.
    int populationSize = holder.GetInt(
        MLTrainFactory.PropertyPopulationSize, false, 5000);
    double mutation = holder.GetDouble(
        MLTrainFactory.PropertyMutation, false, 0.1d);
    double mate = holder.GetDouble(MLTrainFactory.PropertyMate, false, 0.25d);

    return new NeuralGeneticAlgorithm((BasicNetwork) method,
        new RangeRandomizer(-1, 1), score, populationSize, mutation, mate);
}
/// <summary>
/// Create a SVM trainer.
/// </summary>
/// <param name="method">The method to use; must be a SupportVectorMachine.</param>
/// <param name="training">The training data to use.</param>
/// <param name="argsStr">The arguments to use.</param>
/// <returns>The newly created trainer.</returns>
/// <remarks>
/// Rewritten to remove decompiler artifacts: the original contained
/// always-false loop guards built from double-to-uint casts
/// (e.g. while (((uint) d) > uint.MaxValue)) that never executed and
/// obscured the straight-line logic.
/// </remarks>
public IMLTrain Create(IMLMethod method, IMLDataSet training, string argsStr)
{
    if (!(method is SupportVectorMachine))
    {
        throw new EncogError("SVM Train training cannot be used on a method of type: " + method.GetType().FullName);
    }

    // Default gamma scales inversely with the input count; C defaults to 1.
    double defaultGamma = 1.0 / ((SupportVectorMachine) method).InputCount;
    const double defaultC = 1.0;

    ParamsHolder holder = new ParamsHolder(ArchitectureParse.ParseParams(argsStr));
    double gamma = holder.GetDouble("GAMMA", false, defaultGamma);
    double c = holder.GetDouble("C", false, defaultC);

    return new SVMTrain((SupportVectorMachine) method, training)
    {
        Gamma = gamma,
        C = c
    };
}
/// <summary>
/// Create a backpropagation trainer.
/// </summary>
/// <param name="method">The method to use; must be a BasicNetwork.</param>
/// <param name="training">The training data to use.</param>
/// <param name="argsStr">The arguments to use.</param>
/// <returns>The newly created trainer.</returns>
/// <exception cref="TrainingError">If the method is not a BasicNetwork.</exception>
public IMLTrain Create(IMLMethod method, IMLDataSet training,
                       String argsStr)
{
    // Validate the method type up front, consistent with the other
    // factories; the original cast blindly and would have thrown an
    // uninformative InvalidCastException for a non-network method.
    if (!(method is BasicNetwork))
    {
        throw new TrainingError(
            "Invalid method type, requires BasicNetwork");
    }

    IDictionary<String, String> args = ArchitectureParse.ParseParams(argsStr);
    var holder = new ParamsHolder(args);

    // Optional parameters with their documented defaults.
    double learningRate = holder.GetDouble(
        MLTrainFactory.PropertyLearningRate, false, 0.7d);
    double momentum = holder.GetDouble(
        MLTrainFactory.PropertyLearningMomentum, false, 0.3d);

    return new Backpropagation((BasicNetwork) method, training,
                               learningRate, momentum);
}
/// <summary>
/// Create a RPROP trainer.
/// </summary>
/// <param name="method">The method to use; must implement IContainsFlat.</param>
/// <param name="training">The training data to use.</param>
/// <param name="argsStr">The arguments to use.</param>
/// <returns>The newly created trainer.</returns>
/// <remarks>
/// Rewritten to remove a decompiler artifact: the original gated the
/// return on "(((uint) initialUpdate) - ((uint) maxStep)) >= 0", which is
/// meaningless uint arithmetic over truncated doubles; had it ever been
/// false, a valid method would incorrectly have fallen through to the
/// "cannot be used" exception.
/// </remarks>
public IMLTrain Create(IMLMethod method, IMLDataSet training, string argsStr)
{
    if (!(method is IContainsFlat))
    {
        throw new EncogError("RPROP training cannot be used on a method of type: " + method.GetType().FullName);
    }

    ParamsHolder holder = new ParamsHolder(ArchitectureParse.ParseParams(argsStr));
    double initialUpdate = holder.GetDouble("INIT_UPDATE", false, 0.1);
    double maxStep = holder.GetDouble("MAX_STEP", false, 50.0);

    return new ResilientPropagation((IContainsFlat) method, training, initialUpdate, maxStep);
}
/// <summary>
/// Create an annealing trainer.
/// </summary>
/// <param name="method">The method to use; must be a BasicNetwork.</param>
/// <param name="training">The training data to use.</param>
/// <param name="argsStr">The arguments to use.</param>
/// <returns>The newly created trainer.</returns>
/// <remarks>
/// Rewritten to remove decompiler artifacts: the original read the
/// unassigned locals num2/num3 inside a dead guard (a definite-assignment
/// error) and could "return" an uninitialized IMLTrain on that
/// unreachable path.
/// </remarks>
public IMLTrain Create(IMLMethod method, IMLDataSet training, string argsStr)
{
    if (!(method is BasicNetwork))
    {
        throw new TrainingError("Invalid method type, requires BasicNetwork");
    }

    // Anneal against the error on the supplied training set.
    ICalculateScore calculateScore = new TrainingSetScore(training);

    ParamsHolder holder = new ParamsHolder(ArchitectureParse.ParseParams(argsStr));
    double startTemp = holder.GetDouble("startTemp", false, 10.0);
    double stopTemp = holder.GetDouble("stopTemp", false, 2.0);
    int cycles = holder.GetInt("cycles", false, 100);

    return new NeuralSimulatedAnnealing((BasicNetwork) method, calculateScore, startTemp, stopTemp, cycles);
}
/// <summary>
/// Create a genetic-algorithm trainer.
/// </summary>
/// <param name="method">The method to use; must be a BasicNetwork.</param>
/// <param name="training">The training data to use.</param>
/// <param name="argsStr">The arguments to use.</param>
/// <returns>The newly created trainer.</returns>
/// <remarks>
/// Rewritten to remove a decompiler artifact: the original wrapped the
/// construction in "do { ... } while ((((uint) num) - ((uint) num3)) &lt; 0)",
/// an unsigned comparison that is always false, so the loop body ran
/// exactly once and the guard was pure noise.
/// </remarks>
public IMLTrain Create(IMLMethod method, IMLDataSet training, string argsStr)
{
    if (!(method is BasicNetwork))
    {
        throw new TrainingError("Invalid method type, requires BasicNetwork");
    }

    // Fitness is the error against the supplied training set.
    ICalculateScore calculateScore = new TrainingSetScore(training);

    ParamsHolder holder = new ParamsHolder(ArchitectureParse.ParseParams(argsStr));
    int populationSize = holder.GetInt("population", false, 5000); // 0x1388
    double mutationPercent = holder.GetDouble("mutate", false, 0.1);
    double matePercent = holder.GetDouble("mate", false, 0.25);

    return new NeuralGeneticAlgorithm((BasicNetwork) method,
        new RangeRandomizer(-1.0, 1.0), calculateScore,
        populationSize, mutationPercent, matePercent);
}
/// <summary>
/// Create a RPROP trainer.
/// </summary>
/// <param name="method">The method to use; must implement IContainsFlat.</param>
/// <param name="training">The training data to use.</param>
/// <param name="argsStr">The arguments to use.</param>
/// <returns>The newly created trainer.</returns>
public IMLTrain Create(IMLMethod method, IMLDataSet training,
                       String argsStr)
{
    if (!(method is IContainsFlat))
    {
        throw new EncogError(
            "RPROP training cannot be used on a method of type: "
            + method.GetType().FullName);
    }

    var holder = new ParamsHolder(ArchitectureParse.ParseParams(argsStr));

    // Optional RPROP tuning parameters; fall back to the library defaults.
    double initialUpdate = holder.GetDouble(
        MLTrainFactory.PropertyInitialUpdate, false,
        RPROPConst.DefaultInitialUpdate);
    double maxStep = holder.GetDouble(
        MLTrainFactory.PropertyMaxStep, false,
        RPROPConst.DefaultMaxStep);

    return new ResilientPropagation((IContainsFlat) method, training,
                                    initialUpdate, maxStep);
}
/// <summary>
/// Create a SOM neighborhood trainer. (The original summary incorrectly
/// said "LMA trainer".)
/// </summary>
/// <param name="method">The method to use; must be a SOMNetwork.</param>
/// <param name="training">The training data to use.</param>
/// <param name="argsStr">The arguments to use.</param>
/// <returns>The newly created trainer.</returns>
/// <remarks>
/// BUG FIX: the original guard tested "method is SupportVectorMachine"
/// even though the method is later cast to SOMNetwork, so a non-SOM
/// method passed the guard and crashed with an InvalidCastException,
/// while a valid SOMNetwork was rejected. The guard now matches the cast.
/// </remarks>
public IMLTrain Create(IMLMethod method, IMLDataSet training,
                       String argsStr)
{
    if (!(method is SOMNetwork))
    {
        throw new EncogError(
            "Neighborhood training cannot be used on a method of type: "
            + method.GetType().FullName);
    }

    IDictionary<String, String> args = ArchitectureParse.ParseParams(argsStr);
    var holder = new ParamsHolder(args);

    double learningRate = holder.GetDouble(
        MLTrainFactory.PropertyLearningRate, false, 0.7d);
    String neighborhoodStr = holder.GetString(
        MLTrainFactory.PropertyNeighborhood, false, "rbf");
    String rbfTypeStr = holder.GetString(
        MLTrainFactory.PropertyRBFType, false, "gaussian");

    // Resolve the RBF type; unknown names fall back to Gaussian.
    RBFEnum t;
    if (rbfTypeStr.Equals("Gaussian",
                          StringComparison.InvariantCultureIgnoreCase))
    {
        t = RBFEnum.Gaussian;
    }
    else if (rbfTypeStr.Equals("Multiquadric",
                               StringComparison.InvariantCultureIgnoreCase))
    {
        t = RBFEnum.Multiquadric;
    }
    else if (rbfTypeStr.Equals("InverseMultiquadric",
                               StringComparison.InvariantCultureIgnoreCase))
    {
        t = RBFEnum.InverseMultiquadric;
    }
    else if (rbfTypeStr.Equals("MexicanHat",
                               StringComparison.InvariantCultureIgnoreCase))
    {
        t = RBFEnum.MexicanHat;
    }
    else
    {
        t = RBFEnum.Gaussian;
    }

    // Resolve the neighborhood function; an unrecognized name leaves
    // nf null, which BasicTrainSOM must tolerate.
    INeighborhoodFunction nf = null;

    if (neighborhoodStr.Equals("bubble",
                               StringComparison.InvariantCultureIgnoreCase))
    {
        nf = new NeighborhoodBubble(1);
    }
    else if (neighborhoodStr.Equals("rbf",
                                    StringComparison.InvariantCultureIgnoreCase))
    {
        // Multi-dimensional RBF neighborhood requires a dimensions list.
        String str = holder.GetString(
            MLTrainFactory.PropertyDimensions, true, null);
        int[] size = NumberList.FromListInt(CSVFormat.EgFormat, str);
        nf = new NeighborhoodRBF(size, t);
    }
    else if (neighborhoodStr.Equals("rbf1d",
                                    StringComparison.InvariantCultureIgnoreCase))
    {
        nf = new NeighborhoodRBF1D(t);
    }
    if (neighborhoodStr.Equals("single",
                               StringComparison.InvariantCultureIgnoreCase))
    {
        nf = new NeighborhoodSingle();
    }

    var result = new BasicTrainSOM((SOMNetwork) method, learningRate,
                                   training, nf);

    // When an iteration count is supplied, schedule automatic decay of
    // the learning rate and neighborhood radius across the run.
    if (args.ContainsKey(MLTrainFactory.PropertyIterations))
    {
        int plannedIterations = holder.GetInt(
            MLTrainFactory.PropertyIterations, false, 1000);
        double startRate = holder.GetDouble(
            MLTrainFactory.PropertyStartLearningRate, false, 0.05d);
        double endRate = holder.GetDouble(
            MLTrainFactory.PropertyEndLearningRate, false, 0.05d);
        double startRadius = holder.GetDouble(
            MLTrainFactory.PropertyStartRadius, false, 10);
        double endRadius = holder.GetDouble(
            MLTrainFactory.PropertyEndRadius, false, 1);
        result.SetAutoDecay(plannedIterations, startRate, endRate,
                            startRadius, endRadius);
    }

    return result;
}
/// <summary>
/// Create a SVM search trainer.
/// </summary>
/// <param name="method">The method to use; must be a SupportVectorMachine.</param>
/// <param name="training">The training data to use.</param>
/// <param name="argsStr">The arguments to use.</param>
/// <returns>The newly created trainer.</returns>
/// <remarks>
/// Removed a dead statement: the original constructed a ParamsHolder and
/// immediately discarded it ("new ParamsHolder(args);") before building
/// the one actually used.
/// </remarks>
public IMLTrain Create(IMLMethod method, IMLDataSet training,
                       String argsStr)
{
    if (!(method is SupportVectorMachine))
    {
        throw new EncogError(
            "SVM Train training cannot be used on a method of type: "
            + method.GetType().FullName);
    }

    IDictionary<String, String> args = ArchitectureParse.ParseParams(argsStr);
    var holder = new ParamsHolder(args);

    // Search-range parameters; fall back to the library defaults.
    double gammaStart = holder.GetDouble(
        PropertyGamma1, false, SVMSearchTrain.DefaultGammaBegin);
    double cStart = holder.GetDouble(PropertyC1, false,
                                     SVMSearchTrain.DefaultConstBegin);
    double gammaStop = holder.GetDouble(
        PropertyGamma2, false, SVMSearchTrain.DefaultGammaEnd);
    double cStop = holder.GetDouble(PropertyC2, false,
                                    SVMSearchTrain.DefaultConstEnd);
    double gammaStep = holder.GetDouble(
        PropertyGammaStep, false, SVMSearchTrain.DefaultGammaStep);
    double cStep = holder.GetDouble(PropertyCStep, false,
                                    SVMSearchTrain.DefaultConstStep);

    var result = new SVMSearchTrain((SupportVectorMachine) method, training)
    {
        GammaBegin = gammaStart,
        GammaEnd = gammaStop,
        GammaStep = gammaStep,
        ConstBegin = cStart,
        ConstEnd = cStop,
        ConstStep = cStep
    };
    return result;
}
/// <summary>
/// Create a backpropagation trainer.
/// </summary>
/// <param name="method">The method to use; must be a BasicNetwork.</param>
/// <param name="training">The training data to use.</param>
/// <param name="argsStr">The arguments to use.</param>
/// <returns>The newly created trainer.</returns>
/// <exception cref="TrainingError">If the method is not a BasicNetwork.</exception>
public IMLTrain Create(IMLMethod method, IMLDataSet training, string argsStr)
{
    // Validate the method type up front, consistent with the other
    // factories; the original cast blindly and would have thrown an
    // uninformative InvalidCastException for a non-network method.
    if (!(method is BasicNetwork))
    {
        throw new TrainingError("Invalid method type, requires BasicNetwork");
    }

    ParamsHolder holder = new ParamsHolder(ArchitectureParse.ParseParams(argsStr));
    double learnRate = holder.GetDouble("LR", false, 0.7);
    double momentum = holder.GetDouble("MOM", false, 0.3);

    return new Backpropagation((BasicNetwork) method, training, learnRate, momentum);
}
/// <summary>
/// Create a SOM neighborhood trainer.
/// </summary>
/// <param name="method">The method to use; must be a SOMNetwork.</param>
/// <param name="training">The training data to use.</param>
/// <param name="argsStr">The arguments to use.</param>
/// <returns>The newly created trainer.</returns>
/// <remarks>
/// Rewritten from decompiler goto-spaghetti (Label_XXXX jumps, guards on
/// unassigned locals, dead branches). Also fixes the type guard: the
/// original tested "method is SupportVectorMachine" while casting to
/// SOMNetwork, so it rejected valid SOM networks and crashed on SVMs.
/// </remarks>
public IMLTrain Create(IMLMethod method, IMLDataSet training, string argsStr)
{
    if (!(method is SOMNetwork))
    {
        throw new EncogError("Neighborhood training cannot be used on a method of type: " + method.GetType().FullName);
    }

    IDictionary<string, string> args = ArchitectureParse.ParseParams(argsStr);
    ParamsHolder holder = new ParamsHolder(args);

    double learningRate = holder.GetDouble("LR", false, 0.7);
    string neighborhoodStr = holder.GetString("NEIGHBORHOOD", false, "rbf");
    string rbfTypeStr = holder.GetString("RBF_TYPE", false, "gaussian");

    // Resolve the RBF type; unknown names fall back to Gaussian.
    RBFEnum rbfType;
    if (rbfTypeStr.Equals("Gaussian", StringComparison.InvariantCultureIgnoreCase))
    {
        rbfType = RBFEnum.Gaussian;
    }
    else if (rbfTypeStr.Equals("Multiquadric", StringComparison.InvariantCultureIgnoreCase))
    {
        rbfType = RBFEnum.Multiquadric;
    }
    else if (rbfTypeStr.Equals("InverseMultiquadric", StringComparison.InvariantCultureIgnoreCase))
    {
        rbfType = RBFEnum.InverseMultiquadric;
    }
    else if (rbfTypeStr.Equals("MexicanHat", StringComparison.InvariantCultureIgnoreCase))
    {
        rbfType = RBFEnum.MexicanHat;
    }
    else
    {
        rbfType = RBFEnum.Gaussian;
    }

    // Resolve the neighborhood function; an unrecognized name leaves
    // it null, which BasicTrainSOM must tolerate.
    INeighborhoodFunction function = null;
    if (neighborhoodStr.Equals("bubble", StringComparison.InvariantCultureIgnoreCase))
    {
        function = new NeighborhoodBubble(1);
    }
    else if (neighborhoodStr.Equals("rbf", StringComparison.InvariantCultureIgnoreCase))
    {
        // Multi-dimensional RBF neighborhood requires a dimensions list.
        string dimStr = holder.GetString("DIM", true, null);
        int[] dims = NumberList.FromListInt(CSVFormat.EgFormat, dimStr);
        function = new NeighborhoodRBF(dims, rbfType);
    }
    else if (neighborhoodStr.Equals("rbf1d", StringComparison.InvariantCultureIgnoreCase))
    {
        function = new NeighborhoodRBF1D(rbfType);
    }
    else if (neighborhoodStr.Equals("single", StringComparison.InvariantCultureIgnoreCase))
    {
        function = new NeighborhoodSingle();
    }

    BasicTrainSOM result = new BasicTrainSOM((SOMNetwork) method, learningRate, training, function);

    // When an iteration count is supplied, schedule automatic decay of
    // the learning rate and neighborhood radius across the run.
    if (args.ContainsKey("ITERATIONS"))
    {
        int iterations = holder.GetInt("ITERATIONS", false, 1000); // 0x3e8
        double startRate = holder.GetDouble("START_LR", false, 0.05);
        double endRate = holder.GetDouble("END_LR", false, 0.05);
        double startRadius = holder.GetDouble("START_RADIUS", false, 10.0);
        double endRadius = holder.GetDouble("END_RADIUS", false, 1.0);
        result.SetAutoDecay(iterations, startRate, endRate, startRadius, endRadius);
    }

    return result;
}
/// <summary>
/// Create a SVM search trainer.
/// </summary>
/// <param name="method">The method to use; must be a SupportVectorMachine.</param>
/// <param name="training">The training data to use.</param>
/// <param name="argsStr">The arguments to use.</param>
/// <returns>The newly created trainer.</returns>
/// <remarks>
/// Rewritten from decompiler goto-spaghetti: Label_XXXX jumps, guards
/// that read unassigned locals, and a dead discarded
/// "new ParamsHolder(theParams);" have all been removed; the surviving
/// straight-line reads and property assignments are preserved.
/// </remarks>
public IMLTrain Create(IMLMethod method, IMLDataSet training, string argsStr)
{
    if (!(method is SupportVectorMachine))
    {
        throw new EncogError("SVM Train training cannot be used on a method of type: " + method.GetType().FullName);
    }

    ParamsHolder holder = new ParamsHolder(ArchitectureParse.ParseParams(argsStr));

    // Grid-search range parameters with their decompiled defaults.
    double gammaBegin = holder.GetDouble("GAMMA1", false, -10.0);
    double constBegin = holder.GetDouble("C1", false, -5.0);
    double gammaEnd = holder.GetDouble("GAMMA2", false, 10.0);
    double constEnd = holder.GetDouble("C2", false, 15.0);
    double gammaStep = holder.GetDouble("GAMMASTEP", false, 1.0);
    double constStep = holder.GetDouble("CSTEP", false, 2.0);

    return new SVMSearchTrain((SupportVectorMachine) method, training)
    {
        GammaBegin = gammaBegin,
        GammaEnd = gammaEnd,
        GammaStep = gammaStep,
        ConstBegin = constBegin,
        ConstEnd = constEnd,
        ConstStep = constStep
    };
}