/// <summary>
/// Evaluate training performance: builds a feed-forward network of the
/// given topology, generates a random training set, and delegates to
/// the network/dataset overload of EvaluateTrain.
/// </summary>
/// <param name="input">Input neurons.</param>
/// <param name="hidden1">Hidden 1 neurons.</param>
/// <param name="hidden2">Hidden 2 neurons.</param>
/// <param name="output">Output neurons.</param>
/// <returns>The result of the evaluation.</returns>
public static int EvaluateTrain(int input, int hidden1, int hidden2, int output)
{
    // Random data in [-1, 1]: 1000 samples, seeded from 10000.
    var network = EncogUtility.SimpleFeedForward(input, hidden1, hidden2, output, true);
    var training = RandomTrainingFactory.Generate(1000, 10000, input, output, -1, 1);
    return EvaluateTrain(network, training);
}
/// <summary>
/// RandomizeNeuron with a fixed low/high of 100 must set only the four
/// weights feeding the chosen neuron (layer 2, neuron 0) and leave every
/// other weight at its original zero value.
/// </summary>
public void TestRandomizeNeuronOutput()
{
    // Start from an all-zero weight vector so the change is unambiguous.
    double[] zeroWeights = { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 };
    BasicNetwork network = EncogUtility.SimpleFeedForward(2, 3, 0, 1, false);
    NetworkCODEC.ArrayToNetwork(zeroWeights, network);

    var selector = new PruneSelective(network);
    selector.RandomizeNeuron(100, 100, 2, 0);

    Assert.AreEqual("100,100,100,100,0,0,0,0,0,0,0,0,0", network.DumpWeights());
}
/// <inheritdoc/>
public double CalculateError(IMLDataSet data)
{
    // Classification mode scores by class label; all other modes use
    // the regression error metric.
    return OutputMode == PNNOutputMode.Classification
        ? EncogUtility.CalculateClassificationError(this, data)
        : EncogUtility.CalculateRegressionError(this, data);
}
/// <summary>
/// Menu handler: trains the 14-100-1 network against the loaded data,
/// showing the interactive training dialog. Refuses to run until a
/// training set has been obtained.
/// </summary>
private void MenuNetTrainClick(object sender, RoutedEventArgs e)
{
    // Guard: training data must be present before a network can be trained.
    if (Training == null)
    {
        MessageBox.Show("Can't train yet. Obtain some data first.");
        return;
    }

    Network = EncogUtility.SimpleFeedForward(14, 100, 0, 1, false);
    EncogUtility.TrainDialog(Network, Training);
}
/// <summary>
/// Example entry point: trains a single-hidden-layer network with
/// Levenberg-Marquardt down to 1% error, then shuts Encog down.
/// </summary>
/// <param name="app">The example interface (unused here).</param>
public void Execute(IExampleInterface app)
{
    IMLDataSet dataSet = GenerateTraining(InputOutputCount, Compl);
    // SimpleFeedForward already returns a BasicNetwork, so no cast is needed.
    BasicNetwork network = EncogUtility.SimpleFeedForward(InputOutputCount, HiddenCount, 0, InputOutputCount, false);

    var lma = new LevenbergMarquardtTraining(network, dataSet);
    EncogUtility.TrainToError(lma, 0.01);

    EncogFramework.Instance.Shutdown();
}
/// <summary>
/// Loads an IMLDataSet training file (EGB format) from the given path.
/// </summary>
/// <param name="file">Path of the EGB training file to load.</param>
/// <returns>An IMLDataSet ready to use, or null if the file does not exist.</returns>
public static IMLDataSet LoadTraining(string file)
{
    FileInfo networkFile = new FileInfo(@file);
    // Missing file is not an error here; the caller handles the null.
    if (!networkFile.Exists)
    {
        return(null);
    }
    // Load the entire EGB file into memory as a dataset.
    IMLDataSet network = (IMLDataSet)EncogUtility.LoadEGB2Memory(networkFile);
    return(network);
}
/// <summary>
/// Loads an IMLDataSet training file (EGB format) from a given directory.
/// </summary>
/// <param name="directory">The directory containing the file.</param>
/// <param name="file">The file name within the directory.</param>
/// <returns>An IMLDataSet ready to use, or null if the file does not exist.</returns>
public static IMLDataSet LoadTraining(string directory, string file)
{
    FileInfo networkFile = FileUtil.CombinePath(new FileInfo(@directory), @file);
    // Missing file is not an error here; the caller handles the null.
    if (!networkFile.Exists)
    {
        return(null);
    }
    // Load the entire EGB file into memory as a dataset.
    IMLDataSet network = (IMLDataSet)EncogUtility.LoadEGB2Memory(networkFile);
    return(network);
}
/// <summary>
/// Obtain the training set named by the analyst script properties.
/// </summary>
/// <returns>The training set, loaded fully into memory.</returns>
private IMLDataSet ObtainTrainingSet()
{
    ScriptProperties properties = EncogAnalyst.Script.Properties;
    // The script stores a file id, which must be resolved to a real path.
    String fileId = properties.GetPropertyString(ScriptProperties.MlConfigTrainingFile);
    FileInfo resolved = EncogAnalyst.Script.ResolveFilename(fileId);
    return EncogUtility.LoadEGB2Memory(resolved);
}
/// <summary>
/// Converts the collected image list into a training set and creates the
/// feed-forward network sized to it.
/// </summary>
private void ProcessNetwork()
{
    app.WriteLine("Downsampling images...");
    foreach (ImagePair pair in imageList)
    {
        // Ideal output vector: +1 at the image's identity index, -1 elsewhere.
        IMLData ideal = new BasicMLData(outputCount);
        int idx = pair.Identity;
        for (int i = 0; i < outputCount; i++)
        {
            if (i == idx)
            {
                ideal.Data[i] = 1;
            }
            else
            {
                ideal.Data[i] = -1;
            }
        }
        try
        {
            var img = new Bitmap(pair.File);
            var data = new ImageMLData(img);
            training.Add(data, ideal);
        }
        catch (Exception e)
        {
            // Best-effort: a bad or unreadable image is reported and skipped.
            app.WriteLine("Error loading: " + pair.File + ": " + e.Message);
        }
    }
    String strHidden1 = GetArg("hidden1");
    String strHidden2 = GetArg("hidden2");
    // Nothing to train on: bail before building a network.
    if (training.Count == 0)
    {
        app.WriteLine("No images to create network for.");
        return;
    }
    training.Downsample(downsampleHeight, downsampleWidth);
    // NOTE(review): int.Parse throws on a missing or non-numeric
    // hidden1/hidden2 argument — confirm GetArg guarantees numeric values.
    int hidden1 = int.Parse(strHidden1);
    int hidden2 = int.Parse(strHidden2);
    // Network input/output sizes come from the downsampled training set.
    network = EncogUtility.SimpleFeedForward(training
        .InputSize, hidden1, hidden2, training.IdealSize, true);
    app.WriteLine("Created network: " + network);
}
/// <summary>
/// Pruning neuron 1 of the hidden layer must shrink the encoded weight
/// array, reduce the layer to two neurons, and leave the remaining
/// weights in the expected order.
/// </summary>
public void TestPruneNeuronHidden()
{
    BasicNetwork network = ObtainNetwork();
    new PruneSelective(network).Prune(1, 1);

    Assert.AreEqual(18, network.EncodedArrayLength());
    Assert.AreEqual(2, network.GetLayerNeuronCount(1));
    Assert.AreEqual("1,3,4,5,7,8,9,11,12,13,15,16,17,18,19,23,24,25", network.DumpWeights());

    // The pruned network must be structurally identical to a fresh 2-2-4 net.
    BasicNetwork model = EncogUtility.SimpleFeedForward(2, 2, 0, 4, false);
    CheckWithModel(model.Structure.Flat, network.Structure.Flat);
}
/// <summary>
/// Growing the hidden layer from 6 to 60 neurons must produce a network
/// structurally identical to one built with 60 hidden neurons, and both
/// must still compute without throwing.
/// </summary>
public void TestIncreaseNeuronCountHidden2()
{
    BasicNetwork actual = EncogUtility.SimpleFeedForward(5, 6, 0, 2, true);
    new PruneSelective(actual).ChangeNeuronCount(1, 60);

    BasicNetwork expected = EncogUtility.SimpleFeedForward(5, 60, 0, 2, true);
    CheckWithModel(expected.Structure.Flat, actual.Structure.Flat);

    // Smoke-test both networks with a zero input vector.
    var input = new BasicMLData(5);
    expected.Compute(input);
    actual.Compute(input);
}
/// <summary>
/// Trains an XOR network with resilient propagation wrapped in 4-fold
/// cross validation over a noisy dataset, then verifies the XOR mapping.
/// </summary>
public void TestRPROPFolded()
{
    IMLDataSet noisyXor = XOR.CreateNoisyXORDataSet(10);
    BasicNetwork network = NetworkUtil.CreateXORNetworkUntrained();

    // CrossValidationKFold requires its inner trainer to use a folded set.
    var foldedSet = new FoldedDataSet(noisyXor);
    IMLTrain rprop = new ResilientPropagation(network, foldedSet);
    var kfold = new CrossValidationKFold(rprop, 4);

    EncogUtility.TrainToError(kfold, 0.2);
    XOR.VerifyXOR((IMLRegression)kfold.Method, 0.2);
}
/// <summary>
/// Loads an EGB dataset from disk, exiting the process if the file is
/// missing (after waiting for a keypress so the message can be read).
/// </summary>
/// <param name="trainFile">The EGB file to load.</param>
/// <returns>The dataset, fully loaded into memory.</returns>
static private IMLDataSet LoadDataSet(FileInfo trainFile)
{
    Console.WriteLine("Loading dataset.");
    if (!trainFile.Exists)
    {
        Console.WriteLine(@"File not found: " + trainFile);
        Console.ReadKey();
        Environment.Exit(0);
    }

    IMLDataSet dataSet = EncogUtility.LoadEGB2Memory(trainFile);
    Console.WriteLine($"Loaded {dataSet.Count} samples. Input size: {dataSet.InputSize}, Output size: {dataSet.IdealSize}");
    return dataSet;
}
/// <summary>
/// Trains an XOR network, persists it to an EG file, reloads it, and
/// prints the error before and after the round trip (they should match).
/// </summary>
static void Main(string[] args)
{
    IMLDataSet trainingSet = new BasicMLDataSet(XOR_INPUT, XOR_IDEAL);
    var net = EncogUtility.SimpleFeedForward(2, 6, 0, 1, false);
    EncogUtility.TrainToError(net, trainingSet, 0.01);

    double error = net.CalculateError(trainingSet);
    Console.WriteLine($"Error before save to EG: {error}");

    // Round-trip the network through the EG persistence format.
    var egFile = new FileInfo(FILENAME);
    EncogDirectoryPersistence.SaveObject(egFile, net);
    net = (BasicNetwork)EncogDirectoryPersistence.LoadObject(egFile);

    error = net.CalculateError(trainingSet);
    Console.WriteLine($"Error after load from EG: {error}");
}
/// <summary>
/// Loads a persisted network from disk, or creates a fresh
/// input-500-50-3 tanh network sized to the dataset if no file exists.
/// </summary>
/// <param name="networkFile">The EG file to load, if present.</param>
/// <param name="trainingSet">Used only to size the input layer of a new network.</param>
/// <returns>The loaded or newly created network.</returns>
static private BasicNetwork LoadNetwork(FileInfo networkFile, IMLDataSet trainingSet)
{
    if (!networkFile.Exists)
    {
        Console.WriteLine("Creating NN.");
        var network = EncogUtility.SimpleFeedForward(input: trainingSet.InputSize, hidden1: 500, hidden2: 50, output: 3, tanh: true);
        network.Reset();
        return network;
    }

    Console.WriteLine($"Loading network {networkFile.FullName}");
    return (BasicNetwork)EncogDirectoryPersistence.LoadObject(networkFile);
}
/// <summary>
/// Trains an XOR network, persists it with EncogDirectoryPersistence,
/// reloads it, and confirms the error is unchanged after the round trip.
/// </summary>
/// <param name="app">The example interface used for output.</param>
public void Execute(IExampleInterface app)
{
    this.app = app;
    IMLDataSet trainingSet = new BasicMLDataSet(XOR_INPUT, XOR_IDEAL);
    BasicNetwork network = EncogUtility.SimpleFeedForward(2, 6, 0, 1, false);
    EncogUtility.TrainToError(network, trainingSet, 0.01);
    double error = network.CalculateError(trainingSet);

    // BUG FIX: the original saved the network but never reloaded it, so the
    // second error measurement did not exercise persistence at all. Reload
    // before measuring, mirroring the EG round-trip example elsewhere.
    EncogDirectoryPersistence.SaveObject(new FileInfo(FILENAME), network);
    network = (BasicNetwork)EncogDirectoryPersistence.LoadObject(new FileInfo(FILENAME));

    double error2 = network.CalculateError(trainingSet);
    app.WriteLine("Error before save to EG: " + Format.FormatPercent(error));
    // BUG FIX: message read "Error before after to EG" (copy-paste garble).
    app.WriteLine("Error after load from EG: " + Format.FormatPercent(error2));
}
/// <summary>
/// Growing a trained XOR network's hidden layer from 2 to 5 neurons must
/// keep the structure consistent with a fresh 2-5-1 network and must not
/// disturb the trained XOR behavior.
/// </summary>
public void TestIncreaseNeuronCountHidden()
{
    BasicNetwork network = XOR.CreateTrainedXOR();
    Assert.IsTrue(XOR.VerifyXOR(network, 0.10));

    // Grow the hidden layer (layer index 1) to five neurons.
    new PruneSelective(network).ChangeNeuronCount(1, 5);

    BasicNetwork expected = EncogUtility.SimpleFeedForward(2, 5, 0, 1, false);
    CheckWithModel(expected.Structure.Flat, network.Structure.Flat);

    // The enlarged network must still compute XOR correctly.
    Assert.IsTrue(XOR.VerifyXOR(network, 0.10));
}
/// <summary>
/// Prepares RPROP training for the UI: loads the persisted network, the
/// normalized training and cross-validation CSV files, clears the
/// chart/log collections, and starts the background training worker.
/// </summary>
private void TrainNetwork()
{
    network = (BasicNetwork)EncogDirectoryPersistence.LoadObject(Config.TrainedNetworkFile);
    // CSV loads are sized from the loaded network's input/output counts.
    // NOTE(review): trailing args presumably mean (headers: true,
    // format: English, significance: false) — confirm against
    // EncogUtility.LoadCSV2Memory's signature.
    trainingSet = EncogUtility.LoadCSV2Memory(Config.NormalizedTrainingFile.ToString(), network.InputCount, network.OutputCount, true, CSVFormat.English, false);
    crossValidationSet = EncogUtility.LoadCSV2Memory(Config.NormalizedCrossValidationFile.ToString(), network.InputCount, network.OutputCount, true, CSVFormat.English, false);
    train = new ResilientPropagation(network, trainingSet);
    // Reset UI-bound collections before a new run.
    IterationDataCollection.Clear();
    CVIterationDataCollection.Clear();
    IterationLogs.Clear();
    // Training itself runs asynchronously on the worker.
    trainWorker.RunWorkerAsync();
}
/// <summary>
/// Obtain the training set named by the script properties, wrapping it
/// in a FoldedDataSet when k-fold cross validation is requested.
/// </summary>
/// <returns>The training set.</returns>
private IMLDataSet ObtainTrainingSet()
{
    String fileId = Prop.GetPropertyString(ScriptProperties.MlConfigTrainingFile);
    FileInfo resolved = Script.ResolveFilename(fileId);

    IMLDataSet result = EncogUtility.LoadEGB2Memory(resolved);
    // A positive fold count means the trainer expects a folded dataset.
    return _kfold > 0 ? new FoldedDataSet(result) : result;
}
/// <summary>
/// Calculate the error for the given method and dataset, selecting a
/// classification or regression metric from the output column type.
/// </summary>
/// <param name="method">The method to evaluate.</param>
/// <param name="data">The data to evaluate against.</param>
/// <returns>The error.</returns>
public double CalculateError(IMLMethod method, IMLDataSet data)
{
    var outputs = _dataset.NormHelper.OutputColumns;

    // A single nominal output column means this is a classification task.
    if (outputs.Count == 1 && outputs[0].DataType == ColumnType.Nominal)
    {
        return EncogUtility.CalculateClassificationError((IMLClassification)method, data);
    }

    return EncogUtility.CalculateRegressionError((IMLRegression)method, data);
}
/// <summary>
/// Builds and trains a NEAT network down to the requested error level.
/// </summary>
/// <param name="aset">The training dataset used to score candidates.</param>
/// <param name="inputcounts">Number of input neurons.</param>
/// <param name="outputcounts">Number of output neurons.</param>
/// <param name="populationsize">Size of the NEAT population.</param>
/// <param name="ToErrorTraining">The error rate to train down to.</param>
/// <returns>A trained NEAT network.</returns>
public static NEATNetwork BuildTrainNeatNetwork(IMLDataSet aset, int inputcounts, int outputcounts, int populationsize, double ToErrorTraining)
{
    var population = new NEATPopulation(inputcounts, outputcounts, populationsize);

    // Step activation centered at 0.5 on the output layer.
    var stepActivation = new ActivationStep { Center = 0.5 };
    population.OutputActivationFunction = stepActivation;

    // Fitness is the training-set score; evolve until the target error.
    ICalculateScore fitness = new TrainingSetScore(aset);
    var trainer = new NEATTraining(fitness, population);
    EncogUtility.TrainToError(trainer, ToErrorTraining);

    return (NEATNetwork)trainer.Method;
}
/// <summary>
/// Builds an EGB training file from the TrainValues/TrainSchemas tables
/// for the current networkID. Only articles with at least one input value
/// and a complete set of output values are included.
/// </summary>
private static void ExtractTrainData(FileInfo trainFile)
{
    var ctx = new Db();
    // Distinct input/output neuron counts define the vector sizes.
    int inputsCount = ctx.TrainSchemas.Where(x => x.NetworkID == networkID && x.Input == true).Select(x => x.NeuronID).Distinct().Count();
    int outputsCount = ctx.TrainSchemas.Where(x => x.NetworkID == networkID && x.Input == false).Select(x => x.NeuronID).Distinct().Count();
    Console.WriteLine($"in: {inputsCount}, out: {outputsCount}");
    var dataset = new BasicMLDataSet();
    int[] articles = ctx.TrainValues.Where(x => x.NetworkID == networkID).Select(x => x.ArticleID).Distinct().OrderBy(i => i).ToArray();
    // Schema is materialized once and searched per value below.
    var schema = ctx.TrainSchemas.Where(x => x.NetworkID == networkID).ToArray();
    int shingles;
    int forecasts;
    foreach (int articleID in articles)
    {
        var values = ctx.TrainValues.Where(x => x.NetworkID == networkID && x.ArticleID == articleID);
        var inputsArray = new double[inputsCount];
        var idealsArray = new double[outputsCount];
        shingles = 0;
        forecasts = 0;
        foreach (var value in values)
        {
            if (schema.FirstOrDefault(x => x.NeuronID == value.NeuronID).Input)
            {
                // Input neurons: indexing assumes IDs run 1..inputsCount
                // — TODO confirm the numbering scheme in TrainSchemas.
                shingles++;
                inputsArray[value.NeuronID - 1] = value.Value;
            }
            else
            {
                // NOTE(review): ideal index is NeuronID - inputsCount - 2;
                // if output NeuronIDs start at inputsCount + 1 this looks
                // off by one — verify against the actual ID scheme.
                forecasts++;
                idealsArray[value.NeuronID - inputsCount - 2] = value.Value;
            }
        }
        // Keep only articles with some inputs and a full output vector.
        if (shingles > 0 && forecasts == outputsCount)
        {
            dataset.Add(new BasicMLData(inputsArray), new BasicMLData(idealsArray));
            Console.WriteLine($"Article {articleID}: {shingles} shingles, {idealsArray[outputsCount - 1] * 100:N2}");
        }
        else
        {
            Console.WriteLine($"Article {articleID} NOT complete !!!");
        }
    }
    EncogUtility.SaveEGB(trainFile, dataset);
    Console.WriteLine($"Trainset written to: {trainFile.FullName}");
}
/// <summary>
/// Trains an XOR network, round-trips it through binary serialization
/// (SerializeObject), and prints the error before and after — the two
/// values should match if serialization preserved the weights.
/// </summary>
/// <param name="app">The example interface used for output.</param>
public void Execute(IExampleInterface app)
{
    // BUG FIX: the original assigned `this.app = app;` twice (copy-paste).
    this.app = app;

    IMLDataSet trainingSet = new BasicMLDataSet(XOR_INPUT, XOR_IDEAL);
    BasicNetwork network = EncogUtility.SimpleFeedForward(2, 6, 0, 1, false);
    EncogUtility.TrainToError(network, trainingSet, 0.01);
    double error = network.CalculateError(trainingSet);

    // Round-trip through the .ser binary form.
    SerializeObject.Save("encog.ser", network);
    network = (BasicNetwork)SerializeObject.Load("encog.ser");

    double error2 = network.CalculateError(trainingSet);
    app.WriteLine("Error before save to ser: " + Format.FormatPercent(error));
    // BUG FIX: message read "Error before after to ser" (copy-paste garble).
    app.WriteLine("Error after load from ser: " + Format.FormatPercent(error2));
}
/// <summary>
/// Calculate the error for this SVM.
/// </summary>
/// <param name="data">The training set.</param>
/// <returns>The error percentage.</returns>
public double CalculateError(IMLDataSet data)
{
    switch (SVMType)
    {
        case SVMType.SupportVectorClassification:
        case SVMType.NewSupportVectorClassification:
        case SVMType.SupportVectorOneClass:
            // Classification-style SVMs score by class label.
            return EncogUtility.CalculateClassificationError(this, data);
        default:
            // Regression variants (epsilon/new SVR) and any other type use
            // regression error — identical to the original explicit cases,
            // which fell through to the same call in the default branch.
            return EncogUtility.CalculateRegressionError(this, data);
    }
}
/// <summary>
/// Builds an image training set from the captured digit bitmaps, encodes
/// each digit index as a +1/-1 ideal vector, trains a feed-forward
/// network with RPROP via the training dialog, and saves it to network.eg.
/// </summary>
private void Learn_Click(object sender, RoutedEventArgs e)
{
    var downsample = new Downsampler();
    // ImageMLDataSet bounds: hi = 1, lo = -1.
    var training = new ImageMLDataSet(downsample, true, 1, -1);
    for (var i = 0; i < Images.Count; ++i)
    {
        // Ideal vector: +1 at the digit's index, -1 everywhere else.
        var ideal = new BasicMLData(DIGITS_COUNT);
        for (int j = 0; j < DIGITS_COUNT; ++j)
        {
            if (j == i)
            {
                ideal[j] = 1;
            }
            else
            {
                ideal[j] = -1;
            }
        }
        foreach (var img in Images[i])
        {
            // Re-encode the WPF BitmapSource as a GDI+ Bitmap via BMP bytes.
            // NOTE(review): the stream and bitmap are never disposed; GDI+
            // requires the stream to remain open for the Bitmap's lifetime,
            // but both could be released after training — confirm.
            MemoryStream stream = new MemoryStream();
            BitmapEncoder encoder = new BmpBitmapEncoder();
            encoder.Frames.Add(BitmapFrame.Create(img));
            encoder.Save(stream);
            var bitmap = new Drawing.Bitmap(stream);
            var data = new ImageMLData(bitmap);
            training.Add(data, ideal);
        }
    }
    training.Downsample(DIGIT_HEIGHT, DIGIT_WIDTH);
    // Hidden layer fixed at 35 neurons; in/out sizes come from the dataset.
    network = EncogUtility.SimpleFeedForward(training.InputSize, 35, 0, training.IdealSize, true);
    // Currently unused — kept for the commented-out reset strategy below.
    double strategyError = 0.01;
    int strategyCycles = 2000;
    var train = new ResilientPropagation(network, training);
    //train.AddStrategy(new ResetStrategy(strategyError, strategyCycles));
    EncogUtility.TrainDialog(train, network, training);
    EncogDirectoryPersistence.SaveObject(new FileInfo("network.eg"), network);
}
/// <summary>
/// Loads the persisted network and normalized training CSV, trains with
/// resilient propagation until the error drops below 1%, printing each
/// epoch, and saves the trained network back to the same file.
/// </summary>
private static void TrainNetwork()
{
    var network = (BasicNetwork)EncogDirectoryPersistence.LoadObject(Config.TrainedNetworkFile);
    var trainingSet = EncogUtility.LoadCSV2Memory(Config.NormalizedTrainingFile.ToString(), network.InputCount, network.OutputCount, true, CSVFormat.English, false);

    var trainer = new ResilientPropagation(network, trainingSet);
    int epoch = 1;
    do
    {
        trainer.Iteration();
        Console.WriteLine("Epoch : {0} Error : {1}", epoch, trainer.Error);
        epoch++;
    } while (trainer.Error > 0.01);

    EncogDirectoryPersistence.SaveObject(Config.TrainedNetworkFile, network);
}
/// <summary>
/// Pruning neuron 1 of the output layer must reduce the output count from
/// 4 to 3, shrink the encoded weight array, and keep the remaining
/// weights in the expected order.
/// </summary>
public void TestPruneNeuronOutput()
{
    BasicNetwork network = ObtainNetwork();
    Assert.AreEqual(4, network.OutputCount);

    new PruneSelective(network).Prune(2, 1);

    Assert.AreEqual(21, network.EncodedArrayLength());
    Assert.AreEqual(3, network.GetLayerNeuronCount(2));
    Assert.AreEqual("1,2,3,4,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25", network.DumpWeights());

    // The pruned network must be structurally identical to a fresh 2-3-3 net.
    BasicNetwork model = EncogUtility.SimpleFeedForward(2, 3, 0, 3, false);
    CheckWithModel(model.Structure.Flat, network.Structure.Flat);
    Assert.AreEqual(3, network.OutputCount);
}
/// <summary>
/// Evaluates a trained classifier against an evaluation CSV: decodes the
/// equilateral-encoded class output for each row via the analyst script's
/// normalization fields and reports the classification success rate.
/// </summary>
/// <param name="networkFile">The persisted network (EG file).</param>
/// <param name="analystFile">The analyst script (EGA file) with normalization info.</param>
/// <param name="EvaluationFile">The normalized evaluation CSV.</param>
public void Evaluate(FileInfo networkFile, FileInfo analystFile, FileInfo EvaluationFile)
{
    var network = EncogDirectoryPersistence.LoadObject(networkFile) as BasicNetwork;
    var analyst = new EncogAnalyst();
    analyst.Load(analystFile);
    var evaluationSet = EncogUtility.LoadCSV2Memory(EvaluationFile.ToString(), network.InputCount, network.OutputCount, true, CSVFormat.English, false);
    int count = 0;
    int correctCount = 0;
    foreach (var item in evaluationSet)
    {
        // Denormalize the four input fields. The values are never used
        // below — presumably kept for debugging; TODO confirm.
        var sepal_l = analyst.Script.Normalize.NormalizedFields[0].DeNormalize(item.Input[0]);
        var sepal_w = analyst.Script.Normalize.NormalizedFields[1].DeNormalize(item.Input[1]);
        var petal_l = analyst.Script.Normalize.NormalizedFields[2].DeNormalize(item.Input[2]);
        var petal_w = analyst.Script.Normalize.NormalizedFields[3].DeNormalize(item.Input[3]);
        // Field index 4 is the nominal class column; its equilateral
        // encoding is decoded back to a class index for both the network
        // output and the ideal vector.
        int classCount = analyst.Script.Normalize.NormalizedFields[4].Classes.Count;
        double normalizationHigh = analyst.Script.Normalize.NormalizedFields[4].NormalizedHigh;
        double normalizationLow = analyst.Script.Normalize.NormalizedFields[4].NormalizedLow;
        var output = network.Compute(item.Input);
        var resulter = new Equilateral(classCount, normalizationHigh, normalizationLow);
        var predictedClassInt = resulter.Decode(output);
        var predictedClass = analyst.Script.Normalize.NormalizedFields[4].Classes[predictedClassInt].Name;
        var idealClassInt = resulter.Decode(item.Ideal);
        var idealClass = analyst.Script.Normalize.NormalizedFields[4].Classes[idealClassInt].Name;
        if (predictedClassInt == idealClassInt)
        {
            ++correctCount;
        }
        Console.WriteLine($"Count: {++count} | Ideal: {idealClass} Predicted:{predictedClass}");
    }
    Console.WriteLine($"Total test count: {count}");
    Console.WriteLine($"Total correct test count: {correctCount}");
    Console.WriteLine($"% Success: {(correctCount*100.0)/count}");
}
//
// You can use the following additional attributes as you write your tests:
//
// Use ClassInitialize to run code before running the first test in the class
// [ClassInitialize()]
// public static void MyClassInitialize(TestContext testContext) { }
//
// Use ClassCleanup to run code after all tests in a class have run
// [ClassCleanup()]
// public static void MyClassCleanup() { }
//
// Use TestInitialize to run code before running each test
// [TestInitialize()]
// public void MyTestInitialize() { }
//
// Use TestCleanup to run code after each test has run
// [TestCleanup()]
// public void MyTestCleanup() { }
//
#endregion

/// <summary>
/// Builds a 2-3-4 feed-forward network and loads the deterministic weight
/// sequence 1..25 so pruning tests can make exact assertions, then
/// verifies the weights landed where expected.
/// </summary>
/// <returns>The network with known weights.</returns>
private BasicNetwork ObtainNetwork()
{
    BasicNetwork network = EncogUtility.SimpleFeedForward(2, 3, 0, 4, false);
    double[] weights = { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25 };
    NetworkCODEC.ArrayToNetwork(weights, network);

    // Hidden->output weights (layer 1): (fromLayer, fromNeuron, toNeuron).
    Assert.AreEqual(1.0, network.GetWeight(1, 0, 0), 0.01);
    Assert.AreEqual(2.0, network.GetWeight(1, 1, 0), 0.01);
    Assert.AreEqual(3.0, network.GetWeight(1, 2, 0), 0.01);
    Assert.AreEqual(4.0, network.GetWeight(1, 3, 0), 0.01);
    Assert.AreEqual(5.0, network.GetWeight(1, 0, 1), 0.01);
    Assert.AreEqual(6.0, network.GetWeight(1, 1, 1), 0.01);
    Assert.AreEqual(7.0, network.GetWeight(1, 2, 1), 0.01);
    Assert.AreEqual(8.0, network.GetWeight(1, 3, 1), 0.01);
    Assert.AreEqual(9.0, network.GetWeight(1, 0, 2), 0.01);
    Assert.AreEqual(10.0, network.GetWeight(1, 1, 2), 0.01);
    Assert.AreEqual(11.0, network.GetWeight(1, 2, 2), 0.01);
    Assert.AreEqual(12.0, network.GetWeight(1, 3, 2), 0.01);
    Assert.AreEqual(13.0, network.GetWeight(1, 0, 3), 0.01);
    Assert.AreEqual(14.0, network.GetWeight(1, 1, 3), 0.01);
    Assert.AreEqual(15.0, network.GetWeight(1, 2, 3), 0.01);
    Assert.AreEqual(16.0, network.GetWeight(1, 3, 3), 0.01);

    // Input->hidden weights (layer 0). FIX: the original asserted the
    // (0, *, 1) trio (20.0/21.0/22.0) twice verbatim — the copy-paste
    // duplicates have been removed; coverage is unchanged.
    Assert.AreEqual(17.0, network.GetWeight(0, 0, 0), 0.01);
    Assert.AreEqual(18.0, network.GetWeight(0, 1, 0), 0.01);
    Assert.AreEqual(19.0, network.GetWeight(0, 2, 0), 0.01);
    Assert.AreEqual(20.0, network.GetWeight(0, 0, 1), 0.01);
    Assert.AreEqual(21.0, network.GetWeight(0, 1, 1), 0.01);
    Assert.AreEqual(22.0, network.GetWeight(0, 2, 1), 0.01);
    Assert.AreEqual(23.0, network.GetWeight(0, 0, 2), 0.01);
    Assert.AreEqual(24.0, network.GetWeight(0, 1, 2), 0.01);
    Assert.AreEqual(25.0, network.GetWeight(0, 2, 2), 0.01);

    return network;
}
/// <summary>
/// Program entry point: builds a 2-3-1 sigmoid network by hand, trains it
/// on XOR to 1% error, and prints the evaluation results.
/// </summary>
/// <param name="args">Not used.</param>
public static void Main(String[] args)
{
    // Assemble the layers explicitly rather than via a factory helper.
    var net = new BasicNetwork();
    net.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 2));
    net.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
    net.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 1));
    net.Structure.FinalizeStructure();
    net.Reset();

    IMLDataSet trainingSet = new BasicMLDataSet(XOR_INPUT, XOR_IDEAL);
    EncogUtility.TrainToError(net, trainingSet, 0.01);

    // test the neural network
    Console.WriteLine("Neural Network Results:");
    EncogUtility.Evaluate(net, trainingSet);
}