public void TestTrainingBounce()
{
    var perceptron = new Perceptron(2);
    perceptron.ActivationFunction = x => x >= 1 ? 1 : 0;
    var trainer = new PerceptronTrainer(perceptron);
    var rows = new[]
    {
        new PerceptronTrainingRow(new[] { 0.0, 1.0 }, 0),
        new PerceptronTrainingRow(new[] { 0.5, 1.0 }, 0),
        new PerceptronTrainingRow(new[] { 1.0, 0.0 }, 0),
        new PerceptronTrainingRow(new[] { 1.5, 5.0 }, 0),
        new PerceptronTrainingRow(new[] { 3.0, 3.0 }, 0),
        new PerceptronTrainingRow(new[] { 3.5, 0.0 }, 0),
        new PerceptronTrainingRow(new[] { 1.0, 6.0 }, 1),
        new PerceptronTrainingRow(new[] { 2.0, 9.0 }, 1),
        new PerceptronTrainingRow(new[] { 4.0, 6.0 }, 1),
        new PerceptronTrainingRow(new[] { 5.5, 1.0 }, 1),
        new PerceptronTrainingRow(new[] { 6.0, 4.0 }, 1),
        new PerceptronTrainingRow(new[] { 9.0, 3.0 }, 1),
    };
    trainer.Train(rows, 0.01, 500);

    perceptron.Inputs[0] = 2.0;
    perceptron.Inputs[1] = 2.0;
    Assert.AreEqual(0, perceptron.Output);

    perceptron.Inputs[0] = 7.0;
    perceptron.Inputs[1] = 6.0;
    Assert.AreEqual(1, perceptron.Output);
}
public void TestPerceptronOnPrepAttachData()
{
    var model = new PerceptronTrainer().TrainModel(
        400,
        new TwoPassDataIndexer(PrepAttachDataUtility.CreateTrainingStream(), 1, false),
        1);
    PrepAttachDataUtility.TestModel(model, 0.7650408516959644);
}
public static DotNeuralNet.Perceptrons.Perceptron CreateAndTrainPerceptron(
    Scenario scenario, Func<double, double> activationFunction, double adjust, int rounds)
{
    var perceptron = new DotNeuralNet.Perceptrons.Perceptron(scenario.InputNodeCount);
    perceptron.ActivationFunction = activationFunction;
    var trainer = new PerceptronTrainer(perceptron);
    trainer.Train(GetTrainingRows(scenario).ToArray(), adjust, rounds);
    return perceptron;
}
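// Hypothetical usage sketch for CreateAndTrainPerceptron (not from the original
// source): assumes a populated Scenario with two input nodes, and reuses the step
// activation and training constants from TestTrainingBounce above.
public static void CreateAndTrainPerceptronUsageSketch(Scenario scenario)
{
    var perceptron = CreateAndTrainPerceptron(scenario, x => x >= 1 ? 1 : 0, 0.01, 500);
    perceptron.Inputs[0] = 2.0;
    perceptron.Inputs[1] = 2.0;
    var predictedClass = perceptron.Output; // 0 or 1 under the step activation
}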
private Tuple<Type, object> EvaluateCreateTrainer(ParseTreeNode node)
{
    var trainerClassName = node.ChildNodes[0];
    var trainerParams = EvaluateKeyValuePair(node.ChildNodes[1]);

    // Defaults, used when the corresponding key is absent from trainerParams.
    double learnRate = 0.05;
    double minError = 0.01;
    int maxEpochs = 100;
    int maxHiddenLayers = 2;
    int show = 10;
    IPerformanceFunction pFunc = null;
    TrainingModes tMode = TrainingModes.OnLine;

    if (trainerParams.ContainsKey("learnRate"))
    {
        learnRate = double.Parse(trainerParams["learnRate"]);
    }
    if (trainerParams.ContainsKey("minError"))
    {
        minError = double.Parse(trainerParams["minError"]);
    }
    if (trainerParams.ContainsKey("maxEpochs"))
    {
        maxEpochs = int.Parse(trainerParams["maxEpochs"]);
    }
    if (trainerParams.ContainsKey("maxHiddenLayers"))
    {
        maxHiddenLayers = int.Parse(trainerParams["maxHiddenLayers"]);
    }
    if (trainerParams.ContainsKey("show"))
    {
        show = int.Parse(trainerParams["show"]);
    }
    if (trainerParams.ContainsKey("performanceFunction"))
    {
        // Looks up a static field on PerformanceFunctions by name.
        pFunc = (IPerformanceFunction)typeof(PerformanceFunctions)
            .GetField(trainerParams["performanceFunction"])
            .GetValue(null);
    }
    if (trainerParams.ContainsKey("mode"))
    {
        tMode = (TrainingModes)Enum.Parse(typeof(TrainingModes), trainerParams["mode"]);
    }

    Trainer trainer = null;
    switch (trainerClassName.Token.Text)
    {
        case "BackPropagationTrainer":
            trainer = new BackPropagationTrainer(learnRate, minError, 0.01, maxEpochs, show, pFunc, tMode);
            break;
        case "PerceptronTrainer":
            trainer = new PerceptronTrainer(learnRate, minError, 0.01, maxEpochs, show, pFunc, tMode);
            break;
        case "ConstructiveTrainer":
            trainer = new ConstructiveTrainer(learnRate, minError, 0.01, maxEpochs, show, maxHiddenLayers, pFunc, tMode);
            break;
        default:
            throw new Exception("Trainer of kind " + trainerClassName.Token.Text + " does not exist.");
    }
    return new Tuple<Type, object>(typeof(Trainer), trainer);
}
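// Hypothetical parameter map (not from the original source), showing the keys
// EvaluateCreateTrainer recognizes; EvaluateKeyValuePair is assumed to return a
// Dictionary<string, string> shaped like this. Unlisted keys fall back to the
// defaults above (learnRate 0.05, minError 0.01, maxEpochs 100, show 10).
private static readonly Dictionary<string, string> ExampleTrainerParams =
    new Dictionary<string, string>
    {
        ["learnRate"] = "0.1",
        ["maxEpochs"] = "500",
        ["mode"] = "OnLine", // parsed via Enum.Parse(typeof(TrainingModes), ...)
    };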
// TODO: Need a way to report results and settings back for inclusion in model ...
public static AbstractModel train(EventStream events, IDictionary<string, string> trainParams,
    IDictionary<string, string> reportMap)
{
    if (!isValid(trainParams))
    {
        throw new System.ArgumentException("trainParams are not valid!");
    }
    if (isSequenceTraining(trainParams))
    {
        throw new System.ArgumentException("sequence training is not supported by this method!");
    }

    string algorithmName = getStringParam(trainParams, ALGORITHM_PARAM, MAXENT_VALUE, reportMap);
    int iterations = getIntParam(trainParams, ITERATIONS_PARAM, ITERATIONS_DEFAULT, reportMap);
    int cutoff = getIntParam(trainParams, CUTOFF_PARAM, CUTOFF_DEFAULT, reportMap);

    // Maxent-style training expects sorted/merged events; the perceptron does not.
    bool sortAndMerge;
    if (MAXENT_VALUE.Equals(algorithmName) || MAXENT_QN_VALUE.Equals(algorithmName))
    {
        sortAndMerge = true;
    }
    else if (PERCEPTRON_VALUE.Equals(algorithmName))
    {
        sortAndMerge = false;
    }
    else
    {
        throw new IllegalStateException("Unexpected algorithm name: " + algorithmName);
    }

    HashSumEventStream hses = new HashSumEventStream(events);
    string dataIndexerName = getStringParam(trainParams, DATA_INDEXER_PARAM, DATA_INDEXER_TWO_PASS_VALUE, reportMap);

    DataIndexer indexer;
    if (DATA_INDEXER_ONE_PASS_VALUE.Equals(dataIndexerName))
    {
        indexer = new OnePassDataIndexer(hses, cutoff, sortAndMerge);
    }
    else if (DATA_INDEXER_TWO_PASS_VALUE.Equals(dataIndexerName))
    {
        indexer = new TwoPassDataIndexer(hses, cutoff, sortAndMerge);
    }
    else
    {
        throw new IllegalStateException("Unexpected data indexer name: " + dataIndexerName);
    }

    AbstractModel model;
    if (MAXENT_VALUE.Equals(algorithmName))
    {
        int threads = getIntParam(trainParams, "Threads", 1, reportMap);
        model = opennlp.maxent.GIS.trainModel(iterations, indexer, true, false, null, 0, threads);
    }
    else if (MAXENT_QN_VALUE.Equals(algorithmName))
    {
        int m = getIntParam(trainParams, "numOfUpdates", QNTrainer.DEFAULT_M, reportMap);
        int maxFctEval = getIntParam(trainParams, "maxFctEval", QNTrainer.DEFAULT_MAX_FCT_EVAL, reportMap);
        model = new QNTrainer(m, maxFctEval, true).trainModel(indexer);
    }
    else if (PERCEPTRON_VALUE.Equals(algorithmName))
    {
        bool useAverage = getBooleanParam(trainParams, "UseAverage", true, reportMap);
        bool useSkippedAveraging = getBooleanParam(trainParams, "UseSkippedAveraging", false, reportMap);

        // Skipped averaging is a form of averaging, so force useAverage on;
        // otherwise the setting would have no effect.
        if (useSkippedAveraging)
        {
            useAverage = true;
        }

        double stepSizeDecrease = getDoubleParam(trainParams, "StepSizeDecrease", 0, reportMap);
        double tolerance = getDoubleParam(trainParams, "Tolerance", PerceptronTrainer.TOLERANCE_DEFAULT, reportMap);

        PerceptronTrainer perceptronTrainer = new PerceptronTrainer();
        perceptronTrainer.SkippedAveraging = useSkippedAveraging;
        if (stepSizeDecrease > 0)
        {
            perceptronTrainer.StepSizeDecrease = stepSizeDecrease;
        }
        perceptronTrainer.Tolerance = tolerance;
        model = perceptronTrainer.trainModel(iterations, indexer, cutoff, useAverage);
    }
    else
    {
        throw new IllegalStateException("Algorithm not supported: " + algorithmName);
    }

    if (reportMap != null)
    {
        // "X" renders the hash sum in hexadecimal (base 16), as in the Java original.
        reportMap["Training-Eventhash"] = hses.calculateHashSum().ToString("X");
    }
    return model;
}
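// Hypothetical call sketch (not from the original source): builds a minimal
// trainParams map for perceptron training using only the constants and parameter
// keys referenced in train() above; assumes the caller supplies the EventStream.
public static AbstractModel TrainPerceptronSketch(EventStream events)
{
    var trainParams = new Dictionary<string, string>
    {
        [ALGORITHM_PARAM] = PERCEPTRON_VALUE,
        [ITERATIONS_PARAM] = "100",
        [CUTOFF_PARAM] = "5",
        ["UseSkippedAveraging"] = "true", // train() then forces UseAverage on as well
    };
    var reportMap = new Dictionary<string, string>();
    var model = train(events, trainParams, reportMap);
    // reportMap now also carries the "Training-Eventhash" of the indexed events.
    return model;
}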