/// <summary>
/// Evaluate memory.
/// </summary>
private void EvalMemory()
{
    BasicNeuralDataSet training = RandomTrainingFactory.Generate(
        1000, 10000, 10, 10, -1, 1);

    long stop = (10 * Evaluate.MILIS);
    int record = 0;
    INeuralDataPair pair = BasicNeuralDataPair.CreatePair(10, 10);

    int iterations = 0;
    Stopwatch watch = new Stopwatch();
    watch.Start();
    while (watch.ElapsedMilliseconds < stop)
    {
        iterations++;
        training.GetRecord(record++, pair);
        if (record >= training.Count)
        {
            record = 0;
        }
    }

    iterations /= 100000;

    this.report.Report(EncogBenchmark.STEPS, EncogBenchmark.STEP3,
        "Memory dataset, result: " + Format.FormatInteger(iterations));

    this.memoryScore = iterations;
}
public void CalculateAnswer()
{
    var values = new List<double[]>();
    var answers = new List<double[]>();
    var outputSize = parameters.ProblemType == ProblemTypeEnum.Classification
        ? classCount
        : parameters.CountOutput;

    foreach (var input in testSet)
    {
        var output = new double[outputSize];
        network.Compute(input, output);
        values.Add(input);
        answers.Add(output);
    }

    for (int i = 0; i < values.Count; i++)
    {
        for (int argc = 0; argc < values[i].Length; argc++)
        {
            values[i][argc] = ArgNormalize[argc].DeNormalize(values[i][argc]);
        }
    }

    if (parameters.ProblemType == ProblemTypeEnum.Regression)
    {
        for (int i = 0; i < values.Count; i++)
        {
            answers[i][0] = YNormalize.DeNormalize(answers[i][0]);
        }
        // DenormalizationRegg(values, false);
        // DenormalizationRegg(answers, true);
    }

    AnswerSet = new BasicNeuralDataSet(values.ToArray(), answers.ToArray());
}
static void Main(string[] args)
{
    int dimension = 2;
    int numNeuronsPerDimension = 4;
    double volumeNeuronWidth = 2.0 / numNeuronsPerDimension;
    bool includeEdgeRBFs = true;

    RBFNetwork n = new RBFNetwork(dimension, numNeuronsPerDimension, 1, RBFEnum.Gaussian);
    n.SetRBFCentersAndWidthsEqualSpacing(0, 1, RBFEnum.Gaussian, volumeNeuronWidth, includeEdgeRBFs);

    INeuralDataSet trainingSet = new BasicNeuralDataSet(XORInput, XORIdeal);
    SVDTraining train = new SVDTraining(n, trainingSet);

    // SVD training solves the output weights directly, so the condition
    // (epoch < 1) intentionally allows exactly one pass of the do-while loop.
    int epoch = 1;
    do
    {
        train.Iteration();
        Console.WriteLine(@"Epoch #" + epoch + @" Error:" + train.Error);
        epoch++;
    } while ((epoch < 1) && (train.Error > 0.001));

    Console.WriteLine(@"Neural Network Results:");
    foreach (IMLDataPair pair in trainingSet)
    {
        IMLData output = n.Compute(pair.Input);
        Console.WriteLine(pair.Input[0] + @"," + pair.Input[1]
            + @", actual=" + output[0] + @", ideal=" + pair.Ideal[0]);
    }
    Console.Read();
}
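The snippet above assumes XORInput and XORIdeal are defined elsewhere in the class. A minimal sketch of what those fields would plausibly look like, using the standard two-input XOR truth table (the values here are an assumption, not taken from the original source):

// Hypothetical definitions assumed by the example above: the standard
// XOR truth table, two inputs and one ideal output per row.
public static double[][] XORInput =
{
    new[] { 0.0, 0.0 },
    new[] { 0.0, 1.0 },
    new[] { 1.0, 0.0 },
    new[] { 1.0, 1.0 }
};

public static double[][] XORIdeal =
{
    new[] { 0.0 },
    new[] { 1.0 },
    new[] { 1.0 },
    new[] { 0.0 }
};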
public double MeasurePerformance(IMLRegression network, BasicNeuralDataSet dataset,
    IActivationFunction activationFunction)
{
    int correctBits = 0;
    const float Threshold = 0.0f; // bipolar TANH output: positive vs. negative bit

    if (!(activationFunction is ActivationTANH))
    {
        throw new ArgumentException("Bad activation function");
    }

    int n = (int)dataset.Count;
    for (int inputIndex = 0; inputIndex < n; inputIndex++)
    {
        var actualOutputs = network.Compute(dataset.Data[inputIndex].Input);
        for (int outputIndex = 0, k = actualOutputs.Count; outputIndex < k; outputIndex++)
        {
            if (IsBothPositiveBits(actualOutputs[outputIndex],
                    dataset.Data[inputIndex].Ideal[outputIndex], Threshold)
                || IsBothNegativeBits(actualOutputs[outputIndex],
                    dataset.Data[inputIndex].Ideal[outputIndex], Threshold))
            {
                correctBits++;
            }
        }
    }

    long totalBitsCount = dataset.Count * dataset.Data[0].Ideal.Count;
    return (double)correctBits / totalBitsCount;
}
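The helper predicates called above are not shown in the snippet. A minimal sketch of what they plausibly look like, inferred purely from the call sites (these implementations are an assumption):

// Hypothetical helpers inferred from the call sites above: a bit is
// "correct" when actual and ideal fall on the same side of the threshold.
private static bool IsBothPositiveBits(double actual, double ideal, float threshold)
{
    return actual > threshold && ideal > threshold;
}

private static bool IsBothNegativeBits(double actual, double ideal, float threshold)
{
    return actual < threshold && ideal < threshold;
}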
public static void Train(PointsConverted pointsConvertedA,
    PointsConverted pointsConvertedB = null, bool debug = true)
{
    network = new BasicNetwork();
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, N_input));
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, N_hidden));
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, N_output));
    network.Structure.FinalizeStructure();
    network.Reset();

    // Merge the two point sets before training.
    pointsConvertedA.Juntar(pointsConvertedB);

    var trainingSet = new BasicNeuralDataSet(pointsConvertedA.entrada, pointsConvertedA.saida);
    var train = new Backpropagation(network, trainingSet);

    var epoch = 0;
    do
    {
        train.Iteration();
        if (debug)
        {
            Console.WriteLine("Epoch #" + epoch + " Error:" + train.Error);
        }
        epoch++;
        // The original condition used ||, which cannot stop before the error
        // target is met and risks looping forever; && stops after 20000 epochs
        // or as soon as the error is low enough.
    } while ((epoch <= 20000) && (train.Error > 0.001));
}
public static void Main()
{
    BasicNetwork network = new BasicNetwork();
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 2));
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 6));
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 1));
    network.Structure.FinalizeStructure();
    network.Reset();

    INeuralDataSet trainingSet = new BasicNeuralDataSet(XorInput, XorIdeal);
    ITrain train = new ResilientPropagation(network, trainingSet);

    int epoch = 1;
    var timer = Stopwatch.StartNew();
    do
    {
        train.Iteration();
        epoch++;
    } while ((epoch < 50000) && (train.Error > 0.0001));
    timer.Stop();

    Console.WriteLine("Neural Network Results:");
    foreach (var pair in trainingSet)
    {
        var output = network.Compute(pair.Input);
        Console.WriteLine(pair.Input[0] + "," + pair.Input[1]
            + ", actual=" + output[0] + ", ideal=" + pair.Ideal[0]);
    }

    Console.WriteLine($"Completed {epoch} epochs in {timer.Elapsed} " +
        $"({(float)timer.ElapsedMilliseconds / epoch} ms per epoch)");
    Console.ReadLine();
}
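As with the RBF example, XorInput and XorIdeal are assumed fields; a minimal sketch under the same assumption (standard XOR truth table, note the different casing this snippet uses):

// Hypothetical fields assumed above; standard XOR truth table.
public static readonly double[][] XorInput =
    { new[] { 0.0, 0.0 }, new[] { 0.0, 1.0 }, new[] { 1.0, 0.0 }, new[] { 1.0, 1.0 } };
public static readonly double[][] XorIdeal =
    { new[] { 0.0 }, new[] { 1.0 }, new[] { 1.0 }, new[] { 0.0 } };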
private double CalculateClassificationError(BasicNeuralDataSet trainingSet)
{
    int errorCount = 0;
    foreach (var trainData in trainingSet)
    {
        IMLData output = _network.Compute(trainData.Input);
        IMLData ideal = trainData.Ideal;

        // Find the index of the strongest output neuron (argmax).
        double maxValue = Double.MinValue;
        int maxIndex = 0;
        for (int i = 0; i < output.Count; ++i)
        {
            if (maxValue < output[i])
            {
                maxValue = output[i];
                maxIndex = i;
            }
        }

        // The prediction is wrong when the ideal vector is not 1 at that index.
        if (Math.Abs(ideal[maxIndex] - 1) > 0.0001)
        {
            errorCount++;
        }
    }
    return (double)errorCount / trainingSet.Count;
}
/// <summary>
/// Load the specified Encog object from an XML reader.
/// </summary>
/// <param name="xmlIn">The XML reader to use.</param>
/// <returns>The loaded object.</returns>
public IEncogPersistedObject Load(ReadXML xmlIn)
{
    String name = xmlIn.LastTag.Attributes[
        EncogPersistedCollection.ATTRIBUTE_NAME];
    String description = xmlIn.LastTag.GetAttributeValue(
        EncogPersistedCollection.ATTRIBUTE_DESCRIPTION);

    this.currentDataSet = new BasicNeuralDataSet();
    this.currentDataSet.Name = name;
    this.currentDataSet.Description = description;

    while (xmlIn.ReadToTag())
    {
        if (xmlIn.IsIt(BasicNeuralDataSetPersistor.TAG_ITEM, true))
        {
            HandleItem(xmlIn);
        }
        else if (xmlIn.IsIt(EncogPersistedCollection.TYPE_TRAINING, false))
        {
            break;
        }
    }

    return this.currentDataSet;
}
static void Main(string[] args)
{
    INeuralDataSet trainingSet = new BasicNeuralDataSet(AndInput, AndIdeal);

    var network = new BasicNetwork();
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 2));
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 2));
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 1));
    network.Structure.FinalizeStructure();
    network.Reset();

    ITrain train = new ResilientPropagation(network, trainingSet);
    int epoch = 1;
    do
    {
        train.Iteration();
        Console.WriteLine($"Epoch no {epoch}. Error: {train.Error}");
        epoch++;
    } while ((epoch < MaxEpoch) && (train.Error > AcceptableError));

    Console.WriteLine("\nAnd function Results:");
    foreach (IMLDataPair pair in trainingSet)
    {
        IMLData output = network.Compute(pair.Input);
        Console.WriteLine($"{pair.Input[0]} AND {pair.Input[1]} should be: " +
            $"{pair.Ideal[0]} actual value is: {output[0]}");
    }
    Console.ReadKey();
}
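AndInput, AndIdeal, MaxEpoch and AcceptableError are not shown in the snippet. A plausible sketch of those fields, assuming the standard AND truth table (the constant values below are hypothetical placeholders):

// Hypothetical fields assumed by the example above.
public static readonly double[][] AndInput =
{
    new[] { 0.0, 0.0 },
    new[] { 0.0, 1.0 },
    new[] { 1.0, 0.0 },
    new[] { 1.0, 1.0 }
};

public static readonly double[][] AndIdeal =
{
    new[] { 0.0 }, new[] { 0.0 }, new[] { 0.0 }, new[] { 1.0 }
};

private const int MaxEpoch = 10000;           // assumed stopping bound
private const double AcceptableError = 0.001; // assumed error target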
public void Prune()
{
    INeuralDataSet trainingSet = new BasicNeuralDataSet(networkInput, networkIdealOutput);

    FeedForwardPattern pattern = new FeedForwardPattern();
    pattern.InputNeurons = INPUT_NEURONS;
    pattern.OutputNeurons = OUTPUT_NEURONS;

    if (ACTIVIATION_FUNCTION == 1)
        pattern.ActivationFunction = new ActivationSigmoid();
    else if (ACTIVIATION_FUNCTION == 2)
        pattern.ActivationFunction = new ActivationTANH();
    else
        throw new System.Exception("Only 2 activation functions have been implemented.");

    PruneIncremental prune = new PruneIncremental(trainingSet, pattern, 200,
        new ConsoleStatusReportable());
    prune.AddHiddenLayer(10, 40);
    prune.AddHiddenLayer(0, 30);
    prune.Process();

    network = prune.BestNetwork;
    Console.WriteLine("Prune process complete.");
}
static void Main(string[] args)
{
    // XORInput provides two-dimensional inputs (not 8). If XORInput were
    // 8-dimensional, each row would instead look like:
    //   new[] {0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0}, ...
    int dimension = 2;

    // Could also be 16, 64, 256; whether 8 or 32 are accepted needs
    // additional investigation.
    int numNeuronsPerDimension = 4;
    double volumeNeuronWidth = 2.0 / numNeuronsPerDimension;
    bool includeEdgeRBFs = true;

    RBFNetwork n = new RBFNetwork(dimension, numNeuronsPerDimension, 1, RBFEnum.Gaussian);
    n.SetRBFCentersAndWidthsEqualSpacing(0, 1, RBFEnum.Gaussian, volumeNeuronWidth, includeEdgeRBFs);
    //n.RandomizeRBFCentersAndWidths(0, 1, RBFEnum.Gaussian);

    INeuralDataSet trainingSet = new BasicNeuralDataSet(XORInput, XORIdeal);
    SVDTraining train = new SVDTraining(n, trainingSet);

    // As in the earlier SVD example, (epoch < 1) allows exactly one pass:
    // SVD training solves the output weights in a single iteration.
    int epoch = 1;
    do
    {
        train.Iteration();
        Console.WriteLine("Epoch #" + epoch + " Error:" + train.Error);
        epoch++;
    } while ((epoch < 1) && (train.Error > 0.001));
}
/// <summary>
/// Generate a random training set.
/// </summary>
/// <param name="seed">The seed value to use; the same seed value will always
/// produce the same results.</param>
/// <param name="count">How many training items to generate.</param>
/// <param name="inputCount">How many input numbers.</param>
/// <param name="idealCount">How many ideal numbers.</param>
/// <param name="min">The minimum random number.</param>
/// <param name="max">The maximum random number.</param>
/// <returns>The random training set.</returns>
public static BasicNeuralDataSet Generate(long seed, int count, int inputCount,
    int idealCount, double min, double max)
{
    LinearCongruentialGenerator rand = new LinearCongruentialGenerator(seed);

    BasicNeuralDataSet result = new BasicNeuralDataSet();
    for (int i = 0; i < count; i++)
    {
        INeuralData inputData = new BasicNeuralData(inputCount);
        for (int j = 0; j < inputCount; j++)
        {
            inputData.Data[j] = rand.Range(min, max);
        }

        INeuralData idealData = new BasicNeuralData(idealCount);
        for (int j = 0; j < idealCount; j++)
        {
            idealData[j] = rand.Range(min, max);
        }

        BasicNeuralDataPair pair = new BasicNeuralDataPair(inputData, idealData);
        result.Add(pair);
    }
    return result;
}
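A short usage sketch of this factory method (the seed and sizes below are arbitrary): because the generator is seeded, the same call always reproduces the same dataset, which is how the benchmark methods in this listing obtain deterministic workloads.

// Usage sketch: generate 100 pairs with 10 inputs and 2 ideal values
// in [-1, 1]; the fixed seed (42) makes the set reproducible.
BasicNeuralDataSet set = RandomTrainingFactory.Generate(42, 100, 10, 2, -1, 1);
foreach (INeuralDataPair p in set)
{
    Console.WriteLine(p.Input[0] + " -> " + p.Ideal[0]);
}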
private void TrainNetwork(DateTime trainFrom, DateTime trainTo, TrainingStatus status)
{
    if (_input == null || _ideal == null)
    {
        CreateTrainingSets(trainFrom, trainTo);
    }

    _trainThread = Thread.CurrentThread;
    int epoch = 1;
    ITrain train = null;
    try
    {
        var trainSet = new BasicNeuralDataSet(_input, _ideal);
        train = new ResilientPropagation(_network, trainSet);
        double error;
        do
        {
            train.Iteration();
            error = train.Error;
            status?.Invoke(epoch, error, TrainingAlgorithm.Resilient);
            epoch++;
        } while (error > MaxError);
    }
    catch (ThreadAbortException)
    {
        _trainThread = null;
    }
    finally
    {
        train?.FinishTraining();
    }
    _trainThread = null;
}
public static BasicNetwork TrainNetwork(BasicNetwork network, double[][] trainingData,
    double[][] ideals, int maxIterationsBeforeCompletion = 5000, double tolerance = 0.001,
    string connectionId = null, Func<string, string, bool, bool> OutputData = null)
{
    // Check we have data and a model to train
    if (trainingData.Any() && ideals.Any() && network != null)
    {
        // Create dataset - data and correct classifications (matched by position)
        INeuralDataSet trainingSet = new BasicNeuralDataSet(trainingData, ideals);

        // Propagate the data through the network
        ITrain train = new ResilientPropagation(network, trainingSet);

        // Set the iteration count to 0
        var epoch = 0;

        // Train
        do
        {
            train.Iteration();
            // If the delegate is defined, output the progress to it
            if (OutputData != null)
            {
                OutputData(connectionId, "Epoch #" + epoch + " Error:" + train.Error, true);
            }
            epoch++;
        } while ((epoch < maxIterationsBeforeCompletion) && (train.Error > tolerance));
    }

    // Return the trained network
    return network;
}
public void LoadCSVFileTrainingClass()
{
    var path = parameters.TrainFile;
    var csvRead = new ReadCSV(new FileStream(path, FileMode.Open), true, CSVFormat.DecimalPoint);
    var values = new List<double[]>();
    var classes = new List<double[]>();

    while (csvRead.Next())
    {
        values.Add(new double[2] { csvRead.GetDouble(0), csvRead.GetDouble(1) });
        classes.Add(new double[1] { csvRead.GetDouble(2) });
    }
    csvRead.Close();

    var min = parameters.FunctionType == FunctionTypeEnum.Unipolar ? 0d : -1d;

    ArgNormalize = new NormalizedField[2]
    {
        new NormalizedField(NormalizationAction.Normalize, "X",
            values.Max(v => v[0]), values.Min(v => v[0]), 1.0, min),
        new NormalizedField(NormalizationAction.Normalize, "Y",
            values.Max(v => v[1]), values.Min(v => v[1]), 1.0, min)
    };

    for (int i = 0; i < values.Count; i++)
    {
        for (int argc = 0; argc < values[i].Length; argc++)
        {
            values[i][argc] = ArgNormalize[argc].Normalize(values[i][argc]);
        }
    }

    classCount = classes.Select(c => c[0]).Distinct().Count();

    // One-hot encode each class: every column holds the low value (0 or -1,
    // depending on the activation type) except the column of the class itself.
    var normalizeClasses = new List<double[]>();
    for (int i = 0; i < classes.Count; ++i)
    {
        var newClasses = new double[classCount];
        for (int j = 0; j < newClasses.Length; j++)
        {
            newClasses[j] = min;
        }
        newClasses[(int)classes[i][0] - 1] = 1;
        normalizeClasses.Add(newClasses);
    }

    // Hold out 15% of the (stably shuffled) data for validation.
    var trainSetCount = (int)((double)values.Count * ((100.0 - 15) / 100));
    values.Shuffle();
    normalizeClasses.Shuffle();
    MyExtensions.ResetStableShuffle();

    TrainSet = new BasicNeuralDataSet(values.Take(trainSetCount).ToArray(),
        normalizeClasses.Take(trainSetCount).ToArray());
    ValidationSet = new BasicNeuralDataSet(values.Skip(trainSetCount).ToArray(),
        normalizeClasses.Skip(trainSetCount).ToArray());
}
public Task<Network> Train(NetworkConfiguration networkConfiguration,
    int[] spectralImagesIndexesToConsider, TrainingCallback callback)
{
    var network = networkFactory.Create(
        networkConfiguration.ActivationFunction,
        DefaultFingerprintSize,
        networkConfiguration.HiddenLayerCount,
        networkConfiguration.OutputCount);

    var spectralImagesToTrain = trainingDataProvider.GetSpectralImagesToTrain(
        spectralImagesIndexesToConsider,
        (int)System.Math.Pow(2, networkConfiguration.OutputCount));
    var trainingSet = trainingDataProvider.MapSpectralImagesToBinaryOutputs(
        spectralImagesToTrain, networkConfiguration.OutputCount);

    normalizeStrategy.NormalizeInputInPlace(networkConfiguration.ActivationFunction, trainingSet.Inputs);
    normalizeStrategy.NormalizeOutputInPlace(networkConfiguration.ActivationFunction, trainingSet.Outputs);

    return Task.Factory.StartNew(() =>
    {
        var dataset = new BasicNeuralDataSet(trainingSet.Inputs, trainingSet.Outputs);
        var learner = new Backpropagation(network, dataset);
        double correctOutputs = 0.0;

        // Dynamic phase: periodically reorder the outputs to the best pairing,
        // then train for Edyn epochs per reordering.
        for (int idynIndex = 0; idynIndex < Idyn; idynIndex++)
        {
            correctOutputs = networkPerformanceMeter.MeasurePerformance(
                network, dataset, networkConfiguration.ActivationFunction);
            callback(TrainingStatus.OutputReordering, correctOutputs, learner.Error, idynIndex * Edyn);

            var bestPairs = GetBestPairsForReordering(
                (int)System.Math.Pow(2, networkConfiguration.OutputCount),
                network, spectralImagesToTrain, trainingSet);
            ReorderOutputsAccordingToBestPairs(bestPairs, trainingSet, dataset);

            for (int edynIndex = 0; edynIndex < Edyn; edynIndex++)
            {
                correctOutputs = networkPerformanceMeter.MeasurePerformance(
                    network, dataset, networkConfiguration.ActivationFunction);
                callback(TrainingStatus.RunningDynamicEpoch, correctOutputs,
                    learner.Error, (idynIndex * Edyn) + edynIndex);
                learner.Iteration();
            }
        }

        // Fixed phase: train with the final output assignment.
        for (int efixedIndex = 0; efixedIndex < Efixed; efixedIndex++)
        {
            correctOutputs = networkPerformanceMeter.MeasurePerformance(
                network, dataset, networkConfiguration.ActivationFunction);
            callback(TrainingStatus.FixedTraining, correctOutputs,
                learner.Error, (Idyn * Edyn) + efixedIndex);
            learner.Iteration();
        }

        network.ComputeMedianResponses(trainingSet.Inputs, TrainingSongSnippets);
        callback(TrainingStatus.Finished, correctOutputs, learner.Error, (Idyn * Edyn) + Efixed);
        return network;
    });
}
private static EncogTrainingResponse TrainNetwork2(BasicNetwork network, TrainingData training,
    double maxError, CancellationToken cancelToken, double? maxSeconds = null)
{
    //TODO: When the final layer is softmax, the error seems to be higher.
    //Probably because the training outputs need to be run through softmax
    const int MAXITERATIONS = 5000;

    INeuralDataSet trainingSet = new BasicNeuralDataSet(training.Input, training.Output);
    ITrain train = new ResilientPropagation(network, trainingSet);

    DateTime startTime = DateTime.UtcNow;
    TimeSpan? maxTime = maxSeconds != null ? TimeSpan.FromSeconds(maxSeconds.Value) : (TimeSpan?)null;

    bool success = false;
    //List<double> log = new List<double>();
    int iteration = 1;
    double error = double.MaxValue;
    while (true)
    {
        if (cancelToken.IsCancellationRequested)
        {
            break;
        }

        train.Iteration();
        error = train.Error;
        //log.Add(error);
        iteration++;

        if (double.IsNaN(error))
        {
            break;
        }
        else if (error < maxError)
        {
            success = true;
            break;
        }
        else if (iteration >= MAXITERATIONS)
        {
            break;
        }
        else if (maxTime != null && DateTime.UtcNow - startTime > maxTime)
        {
            break;
        }
    }

    //string logExcel = string.Join("\r\n", log); // paste this into excel and chart it to see the error trend

    train.FinishTraining();
    return new EncogTrainingResponse(network, success, error, iteration,
        (DateTime.UtcNow - startTime).TotalSeconds);
}
/// <summary>
/// Load the binary dataset to memory. Memory access is faster.
/// </summary>
/// <returns>A memory dataset.</returns>
public INeuralDataSet LoadToMemory()
{
    BasicNeuralDataSet result = new BasicNeuralDataSet();

    foreach (INeuralDataPair pair in this)
    {
        result.Add(pair);
    }

    return result;
}
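A usage sketch, assuming a binary .egb file produced earlier (the file name below is hypothetical): copying a disk-backed BufferedNeuralDataSet into memory before iterative training avoids a disk read per record, which is exactly the difference the two benchmark methods in this listing measure.

// Usage sketch: pull a disk-backed dataset into memory before training.
// "training.egb" is a hypothetical file created earlier via BufferedNeuralDataSet.Load.
BufferedNeuralDataSet buffered = new BufferedNeuralDataSet("training.egb");
INeuralDataSet inMemory = buffered.LoadToMemory();
buffered.Close();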
private BasicNeuralDataSet PrepareRegressionSet(double[][] data)
{
    var normalizedData = _normalizer.Normalize(data);

    // The first _inputLayerSize columns are inputs; the rest are ideal outputs.
    var input = normalizedData.Select(row => row.Take(_inputLayerSize).ToArray()).ToArray();
    var ideal = normalizedData.Select(row => row.Skip(_inputLayerSize).ToArray()).ToArray();

    var trainingSet = new BasicNeuralDataSet(input, ideal);
    return trainingSet;
}
private BasicNetwork BuildModel(TimeSeries simulatedData, out double scale_factor)
{
    // Scale by the series maximum so all values fall in [0, 1].
    scale_factor = 0;
    for (int i = 0; i < simulatedData.Count; ++i)
    {
        if (scale_factor < simulatedData[i])
        {
            scale_factor = simulatedData[i];
        }
    }

    // Sliding window: each input row holds the previous mWindowSize values.
    double[][] inputs = new double[simulatedData.Count][];
    for (int i = 0; i < simulatedData.Count; ++i)
    {
        inputs[i] = new double[mWindowSize];
        for (int j = 0; j < mWindowSize; ++j)
        {
            int index = i - (mWindowSize - j);
            if (index >= 0)
            {
                inputs[i][j] = simulatedData[index] / scale_factor;
            }
        }
    }

    double[][] outputs = new double[simulatedData.Count][];
    for (int i = 0; i < simulatedData.Count; ++i)
    {
        outputs[i] = new double[1] { simulatedData[i] / scale_factor };
    }

    INeuralDataSet trainingSet = new BasicNeuralDataSet(inputs, outputs);

    BasicNetwork network = new BasicNetwork();
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, mWindowSize));
    for (int i = 0; i < mNumberHiddenLayer; ++i)
    {
        network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, mWindowSize));
    }
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 1));
    network.Structure.FinalizeStructure();
    network.Reset();

    Train(network, trainingSet);
    return network;
}
public override List<double> Train(List<Tuple<List<double>, List<double>>> trainSet, int iterations)
{
    var errors = new List<double>(iterations);

    INeuralDataSet trainingSet = new BasicNeuralDataSet(
        trainSet.Select(e => e.Item1.ToArray()).ToArray(),
        trainSet.Select(e => e.Item2.ToArray()).ToArray());
    ITrain train = new Backpropagation(network, trainingSet, this.learningRate, this.momentum);

    var epoch = 0;
    do
    {
        train.Iteration();
        epoch++;
        errors.Add(train.Error);
    } while (epoch < iterations);

    return errors;
}
public static void TestNetwork(BasicNetwork network, double[][] trainingData, double[][] ideals,
    string connectionId, Func<string, string, bool, bool> OutputData = null)
{
    // Create dataset - data and correct classifications (matched by position)
    var trainingSet = new BasicNeuralDataSet(trainingData, ideals);

    foreach (var pair in trainingSet)
    {
        var output = network.Compute(pair.Input);
        // Columns: |Rain|,|Wind|,|Sun|,|Cloud/Overcast|,|Snow|
        if (OutputData != null)
        {
            OutputData(connectionId, FormatOutput(pair, output), false);
        }
    }
}
/// <summary>
/// Evaluate disk.
/// </summary>
private void EvalBinary()
{
    String file = "temp.egb";

    BasicNeuralDataSet training = RandomTrainingFactory.Generate(
        1000, 10000, 10, 10, -1, 1);

    // create the binary file
    File.Delete(file);
    BufferedNeuralDataSet training2 = new BufferedNeuralDataSet(file);
    training2.Load(training);

    long stop = (10 * Evaluate.MILIS);
    int record = 0;
    INeuralDataPair pair = BasicNeuralDataPair.CreatePair(10, 10);

    Stopwatch watch = new Stopwatch();
    watch.Start();

    int iterations = 0;
    while (watch.ElapsedMilliseconds < stop)
    {
        iterations++;
        training2.GetRecord(record++, pair);
        if (record >= training2.Count)
        {
            record = 0;
        }
    }

    training2.Close();
    iterations /= 100000;

    this.report.Report(EncogBenchmark.STEPS, EncogBenchmark.STEP4,
        "Disk(binary) dataset, result: " + Format.FormatInteger(iterations));

    File.Delete(file);
    this.binaryScore = iterations;
}
public void MainTestEncog()
{
    int classesCount = 0;
    int attributesCount = 0;
    var epoch = 0;

    var trainData = CsvParser.Parse("./../../../DataSets/data.train.csv",
        ref classesCount, ref attributesCount).NormalizedData;
    // Note: this loads the training file again; a test split such as
    // data.test.csv is presumably intended here.
    var testData = CsvParser.Parse("./../../../DataSets/data.train.csv",
        ref classesCount, ref attributesCount).NormalizedData;
    var correct = 0;

    var network = new BasicNetwork();
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 2));
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 4));
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 5));
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
    network.Structure.FinalizeStructure();
    network.Reset();

    INeuralDataSet trainingSet = new BasicNeuralDataSet(
        trainData.Select(e => e.Item1.ToArray()).ToArray(),
        trainData.Select(e => e.Item2.ToArray()).ToArray());
    ITrain train = new Backpropagation(network, trainingSet, 0.3, 0.6);
    //ITrain train = new ResilientPropagation(network, trainingSet);

    do
    {
        train.Iteration();
        epoch++;
    } while ((epoch < 15000) && (train.Error > 0.001));

    foreach (IMLDataPair pair in trainingSet)
    {
        var output = network.Compute(pair.Input);
        if (Network.GetClass(new List<double>() { output[0], output[1], output[2] })
            == Network.GetClass(new List<double>() { pair.Ideal[0], pair.Ideal[1], pair.Ideal[2] }))
        {
            correct++;
        }
    }
}
static void Main(string[] args)
{
    // Despite the names, AndInput/AndIdeal here must hold 25-input,
    // 20-output letter patterns to match the network shape below.
    INeuralDataSet trainingSet = new BasicNeuralDataSet(AndInput, AndIdeal);

    var network = new BasicNetwork();
    network.AddLayer(new BasicLayer(new ActivationRamp(), true, 25));
    network.AddLayer(new BasicLayer(new ActivationRamp(), true, 75));
    network.AddLayer(new BasicLayer(new ActivationRamp(), true, 50));
    network.AddLayer(new BasicLayer(new ActivationRamp(), true, 20));
    network.Structure.FinalizeStructure();
    network.Reset();

    ITrain train = new Backpropagation(network, trainingSet, 0.02, 0.3);
    int epoch = 1;
    do
    {
        train.Iteration();
        Console.WriteLine($"{train.Error}");
        epoch++;
    } while ((epoch < MaxEpoch) && (train.Error > AcceptableError));

    // Presumably a 5x5 bitmap of the letter to recognize, flattened to 25 inputs.
    var input = new BasicMLData(new double[25]
    {
        1, 1, 0, 1, 1,
        1, 0, 1, 0, 1,
        1, 0, 0, 0, 1,
        1, 0, 0, 0, 1,
        1, 0, 0, 0, 1
    });
    int best = network.Winner(input);
    Console.WriteLine($"Rozpoznano: {_literki[best]}"); // "Rozpoznano" = "Recognized"

    foreach (IMLDataPair pair in trainingSet)
    {
        IMLData output = network.Compute(pair.Input);
        // "wynik" = "result": print all 20 outputs to three decimal places.
        var formatted = string.Join(" ", Enumerable.Range(0, 20)
            .Select(i => output[i].ToString("0.000")));
        Console.WriteLine($"wynik : {formatted}");
    }
    Console.ReadKey();
}
public NetworkState TrainNetwork()
{
    int epoch = 0;
    _trainingSet = new BasicNeuralDataSet(_annInputs, _annOutputs);

    _basicNetwork = new BasicNetwork();
    _basicNetwork.AddLayer(new BasicLayer(null, true, _nInputNeurons));
    _basicNetwork.AddLayer(new BasicLayer(new ActivationSigmoid(), true, _nHiddenNeurons));
    _basicNetwork.AddLayer(new BasicLayer(new ActivationSigmoid(), false, _nOutputNeurons));
    _basicNetwork.Structure.FinalizeStructure();
    _basicNetwork.Reset();

    // Distribute random weights in [-1, 1]; we work in a stochastic
    // (non-deterministic) environment, but the ConsistentRandomizer makes
    // training always start from the same initial state.
    new ConsistentRandomizer(-1, 1, 100).Randomize(_basicNetwork);

    Backpropagation train = new Backpropagation(_basicNetwork, _trainingSet, LearnRate, Momentum);
    train.FixFlatSpot = false;

    do
    {
        train.Iteration();
        epoch++;
        _trainError = train.Error;
        BufferTrainError.Add(_trainError);
    } while (train.Error > _error);
    train.FinishTraining();

    _neuronsWeight = _basicNetwork.Structure.Network.Flat.Weights
        .Select(x => System.Convert.ToDouble(x)).ToList();
    Make2DNeuronsWeightsMap();

    foreach (IMLDataPair pair in _trainingSet)
    {
        IMLData output = _basicNetwork.Compute(pair.Input);
        Console.WriteLine("Input: " + pair.Input[0] + @" - " + pair.Input[1] + @" - " + pair.Input[2]);
        Console.WriteLine("Output 0: - actual=" + output[0] + @"-ideal=" + pair.Ideal[0]);
        Console.WriteLine("Output 1: - actual=" + output[1] + @"-ideal=" + pair.Ideal[1]);
        Console.WriteLine("Output 2: - actual=" + output[2] + @"-ideal=" + pair.Ideal[2]);
    }

    return NetworkState.TRAINED;
}
/// <summary>
/// Measure the performance of the network
/// </summary>
/// <param name="network">Network to analyze</param>
/// <param name="dataset">Dataset with input and ideal data</param>
/// <returns>Fraction of output bits the network got correct.</returns>
public static double MeasurePerformance(BasicNetwork network, BasicNeuralDataSet dataset)
{
    int correctBits = 0;
    float threshold;

    // Get the activation function of the output layer.
    IActivationFunction activationFunction = network.GetActivation(network.LayerCount - 1);
    if (activationFunction is ActivationSigmoid)
    {
        threshold = 0.5f; // > 0.5, range of sigmoid is [0..1]
    }
    else if (activationFunction is ActivationTANH)
    {
        threshold = 0.0f; // > 0, range of bipolar sigmoid is [-1..1]
    }
    else
    {
        throw new ArgumentException("Bad activation function");
    }

    int n = (int)dataset.Count;
    Parallel.For(0, n, i =>
    {
        IMLData actualOutputs = network.Compute(dataset.Data[i].Input);
        lock (LockObject)
        {
            // A bit is correct when actual and ideal fall on the same side of the threshold.
            for (int j = 0, k = actualOutputs.Count; j < k; j++)
            {
                if ((actualOutputs[j] > threshold && dataset.Data[i].Ideal[j] > threshold)
                    || (actualOutputs[j] < threshold && dataset.Data[i].Ideal[j] < threshold))
                {
                    correctBits++;
                }
            }
        }
    });

    long totalOutputBitsCount = dataset.Count * dataset.Data[0].Ideal.Count;
    return (double)correctBits / totalOutputBitsCount;
}
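A short usage sketch (assuming a sigmoid network and a BasicNeuralDataSet from one of the earlier examples in this listing): a result of 1.0 means every output bit fell on the correct side of the threshold.

// Usage sketch: score a trained sigmoid network against its training set.
// 'network' and 'dataset' are assumed to come from an earlier example.
double fractionCorrect = MeasurePerformance(network, dataset);
Console.WriteLine("Correct bits: " + fractionCorrect.ToString("P1"));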
public static BasicNeuralDataSet LoadMonksData(string datasetLocation)
{
    string[] lines = File.ReadAllLines(datasetLocation);
    double[][] input = new double[lines.Length][];
    double[][] output = new double[lines.Length][];

    for (int i = 0; i < lines.Length; i++)
    {
        var line = lines[i].Trim().Split(' ');

        // One-hot encode the six categorical MONK's attributes into 17 inputs:
        // a1 (3 values), a2 (3), a3 (2), a4 (3), a5 (4), a6 (2).
        input[i] = new double[17];
        input[i][0] = line[1] == "1" ? 1 : 0;
        input[i][1] = line[1] == "2" ? 1 : 0;
        input[i][2] = line[1] == "3" ? 1 : 0;
        input[i][3] = line[2] == "1" ? 1 : 0;
        input[i][4] = line[2] == "2" ? 1 : 0;
        input[i][5] = line[2] == "3" ? 1 : 0;
        input[i][6] = line[3] == "1" ? 1 : 0;
        input[i][7] = line[3] == "2" ? 1 : 0;
        input[i][8] = line[4] == "1" ? 1 : 0;
        input[i][9] = line[4] == "2" ? 1 : 0;
        input[i][10] = line[4] == "3" ? 1 : 0;
        input[i][11] = line[5] == "1" ? 1 : 0;
        input[i][12] = line[5] == "2" ? 1 : 0;
        input[i][13] = line[5] == "3" ? 1 : 0;
        input[i][14] = line[5] == "4" ? 1 : 0;
        input[i][15] = line[6] == "1" ? 1 : 0;
        input[i][16] = line[6] == "2" ? 1 : 0;

        // The first column is the binary class label.
        output[i] = new double[] { double.Parse(line[0]) };
    }

    BasicNeuralDataSet trainingSet = new BasicNeuralDataSet(input, output);
    return trainingSet;
}
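A usage sketch under stated assumptions (the file path and hidden-layer size below are hypothetical): since the loader produces 17 one-hot inputs and one binary output, the network shape follows directly from the encoding.

// Usage sketch: "monks-1.train" is a hypothetical path to a MONK's file.
BasicNeuralDataSet monks = LoadMonksData("monks-1.train");

var net = new BasicNetwork();
net.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 17)); // 17 encoded inputs
net.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 5));  // assumed hidden size
net.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1)); // one binary output
net.Structure.FinalizeStructure();
net.Reset();

ITrain trainer = new ResilientPropagation(net, monks);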
public void LoadCSVFileTrainingRegg()
{
    var path = parameters.TrainFile;
    var csvRead = new ReadCSV(new FileStream(path, FileMode.Open), true, CSVFormat.DecimalPoint);
    var valuesX = new List<double[]>();
    var valuesY = new List<double[]>();

    while (csvRead.Next())
    {
        valuesX.Add(new double[1] { csvRead.GetDouble(0) });
        valuesY.Add(new double[1] { csvRead.GetDouble(1) });
    }
    csvRead.Close();

    var min = parameters.FunctionType == FunctionTypeEnum.Unipolar ? 0d : -1d;

    ArgNormalize = new NormalizedField[]
    {
        new NormalizedField(NormalizationAction.Normalize, "X",
            valuesX.Max(v => v[0]), valuesX.Min(v => v[0]), 1.0, min)
    };
    YNormalize = new NormalizedField(NormalizationAction.Normalize, "Y",
        valuesY.Max(v => v[0]), valuesY.Min(v => v[0]), 1.0, min);

    for (int i = 0; i < valuesX.Count; i++)
    {
        valuesX[i][0] = ArgNormalize[0].Normalize(valuesX[i][0]);
        valuesY[i][0] = YNormalize.Normalize(valuesY[i][0]);
    }

    // Hold out 15% of the (stably shuffled) data for validation.
    var trainSetCount = (int)((double)valuesX.Count * ((100.0 - 15) / 100));
    valuesX.Shuffle();
    valuesY.Shuffle();
    MyExtensions.ResetStableShuffle();

    TrainSet = new BasicNeuralDataSet(valuesX.Take(trainSetCount).ToArray(),
        valuesY.Take(trainSetCount).ToArray());
    ValidationSet = new BasicNeuralDataSet(valuesX.Skip(trainSetCount).ToArray(),
        valuesY.Skip(trainSetCount).ToArray());
}
private BasicNeuralDataSet PrepareClassificationSet(double[][] data)
{
    var input = data.Select(row => row.Take(_inputLayerSize).ToArray()).ToArray();
    var idealTmp = data.Select(row => row.Skip(_inputLayerSize).ToArray()).ToArray();
    var normalizedInput = _normalizer.Normalize(input);

    // Split the label into classes by "activating" the column that
    // corresponds to the class (one-hot, with the low value elsewhere).
    var ideal = new double[idealTmp.Length][];
    for (int i = 0; i < idealTmp.Length; ++i)
    {
        ideal[i] = Enumerable.Repeat(GetNormalizationLowValue(), _outputLayerSize).ToArray();
        ideal[i][(int)idealTmp[i][0] - 1] = 1;
    }

    var trainingSet = new BasicNeuralDataSet(normalizedInput, ideal);
    return trainingSet;
}