public void TestRPROPCont()
{
    IMLDataSet trainingSet = XOR.CreateXORDataSet();
    BasicNetwork net1 = XOR.CreateUnTrainedXOR();
    BasicNetwork net2 = XOR.CreateUnTrainedXOR();

    ResilientPropagation rprop1 = new ResilientPropagation(net1, trainingSet);
    ResilientPropagation rprop2 = new ResilientPropagation(net2, trainingSet);

    rprop1.Iteration();
    rprop1.Iteration();

    rprop2.Iteration();
    rprop2.Iteration();

    // Pause the second trainer and resume its state in a fresh trainer.
    TrainingContinuation cont = rprop2.Pause();

    ResilientPropagation rprop3 = new ResilientPropagation(net2, trainingSet);
    rprop3.Resume(cont);

    rprop1.Iteration();
    rprop3.Iteration();

    // After the same number of iterations, both networks should have identical weights.
    for (int i = 0; i < net1.Flat.Weights.Length; i++)
    {
        Assert.AreEqual(net1.Flat.Weights[i], net2.Flat.Weights[i], 0.0001);
    }
}
public void TestRPROPContPersistEG()
{
    IMLDataSet trainingSet = XOR.CreateXORDataSet();
    BasicNetwork net1 = XOR.CreateUnTrainedXOR();
    BasicNetwork net2 = XOR.CreateUnTrainedXOR();

    ResilientPropagation rprop1 = new ResilientPropagation(net1, trainingSet);
    ResilientPropagation rprop2 = new ResilientPropagation(net2, trainingSet);

    rprop1.Iteration();
    rprop1.Iteration();

    rprop2.Iteration();
    rprop2.Iteration();

    // Pause the second trainer and round-trip its state through an EG file.
    TrainingContinuation cont = rprop2.Pause();
    EncogDirectoryPersistence.SaveObject(EG_FILENAME, cont);
    TrainingContinuation cont2 = (TrainingContinuation)EncogDirectoryPersistence.LoadObject(EG_FILENAME);

    ResilientPropagation rprop3 = new ResilientPropagation(net2, trainingSet);
    rprop3.Resume(cont2);

    rprop1.Iteration();
    rprop3.Iteration();

    // Training resumed from the persisted continuation should match uninterrupted training.
    for (int i = 0; i < net1.Flat.Weights.Length; i++)
    {
        Assert.AreEqual(net1.Flat.Weights[i], net2.Flat.Weights[i], 0.0001);
    }
}
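// The two continuation tests above depend on an XOR helper class that is not shown in
// this section. A minimal sketch of what it could look like, assuming the standard XOR
// truth table and a small feedforward network (the 2-2-1 layer sizes are an assumption):
public static class XOR
{
    public static readonly double[][] XORInput =
    {
        new[] { 0.0, 0.0 }, new[] { 1.0, 0.0 },
        new[] { 0.0, 1.0 }, new[] { 1.0, 1.0 }
    };

    public static readonly double[][] XORIdeal =
    {
        new[] { 0.0 }, new[] { 1.0 }, new[] { 1.0 }, new[] { 0.0 }
    };

    public static IMLDataSet CreateXORDataSet()
    {
        return new BasicMLDataSet(XORInput, XORIdeal);
    }

    public static BasicNetwork CreateUnTrainedXOR()
    {
        var network = new BasicNetwork();
        network.AddLayer(new BasicLayer(null, true, 2));
        network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 2));
        network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
        network.Structure.FinalizeStructure();
        // The weight-comparison tests require every call to produce identical starting
        // weights, so a deterministic randomizer is used here instead of Reset().
        (new ConsistentRandomizer(-1, 1)).Randomize(network);
        return network;
    }
}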
static void Main(string[] args)
{
    // create a neural network without using a factory
    var network = new BasicNetwork();
    network.AddLayer(new BasicLayer(null, true, 2));
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 2));
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
    network.Structure.FinalizeStructure();
    network.Reset();

    IMLDataSet trainingSet = new BasicMLDataSet(XORInput, XORIdeal);
    IMLTrain train = new ResilientPropagation(network, trainingSet);

    int epoch = 1;
    do
    {
        train.Iteration();
        Console.WriteLine($"Epoch #{epoch} Error: {train.Error}");
        epoch++;
    } while (train.Error > 0.01);
    train.FinishTraining();

    Console.WriteLine("Neural Network Results:");
    foreach (IMLDataPair iPair in trainingSet)
    {
        IMLData output = network.Compute(iPair.Input);
        Console.WriteLine($"{iPair.Input[0]}, {iPair.Input[1]}, actual={output[0]}, ideal={iPair.Ideal[0]}");
    }

    EncogFramework.Instance.Shutdown();
    Console.ReadKey();
}
static void Main(string[] args)
{
    var network = new BasicNetwork();
    network.AddLayer(new BasicLayer(null, true, 2));
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
    network.Structure.FinalizeStructure();
    network.Reset();

    var trainingSet = new BasicMLDataSet(XORInput, XORIdeal);
    var train = new ResilientPropagation(network, trainingSet);

    var epoch = 1;
    do
    {
        train.Iteration();
    } while (train.Error > 0.01);
    train.FinishTraining();

    foreach (var pair in trainingSet)
    {
        var output = network.Compute(pair.Input);
        Console.WriteLine(pair.Input[0] + @", " + pair.Input[1] + @" , actual=" + output[0] + @", ideal=" + pair.Ideal[0]);
    }

    EncogFramework.Instance.Shutdown();
    Console.ReadLine();
}
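// Both XOR Main methods above reference XORInput and XORIdeal fields declared on the
// containing class. A sketch of those declarations (the field names come from the code
// above; the values are the standard XOR truth table):
public static double[][] XORInput =
{
    new[] { 0.0, 0.0 }, new[] { 1.0, 0.0 },
    new[] { 0.0, 1.0 }, new[] { 1.0, 1.0 }
};

public static double[][] XORIdeal =
{
    new[] { 0.0 }, new[] { 1.0 }, new[] { 1.0 }, new[] { 0.0 }
};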
private void Preprocessing_Completed(object sender, RunWorkerCompletedEventArgs e)
{
    worker.ReportProgress(0, "Creating Network...");

    BasicNetwork Network = new BasicNetwork();
    Network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, DataContainer.NeuralNetwork.Data.InputSize));
    Network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 50));
    Network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, DataContainer.NeuralNetwork.Data.IdealSize));
    Network.Structure.FinalizeStructure();
    Network.Reset();
    DataContainer.NeuralNetwork.Network = Network;

    ResilientPropagation training = new ResilientPropagation(DataContainer.NeuralNetwork.Network, DataContainer.NeuralNetwork.Data);
    worker.ReportProgress(0, "Running Training: Epoch 0");

    for (int i = 0; i < 200; i++)
    {
        training.Iteration();
        worker.ReportProgress(0, "Running Training: Epoch " + (i + 1) + " Current Training Error : " + training.Error);

        if (worker.CancellationPending)
        {
            completed = true;
            return;
        }
    }

    completed = true;
}
public void TestRPROPConsistency()
{
    IMLDataSet training = EncoderTrainingFactory.generateTraining(4, false);
    var network = EncogUtility.SimpleFeedForward(4, 2, 0, 4, true);
    (new ConsistentRandomizer(-1, 1, 50)).Randomize(network);

    var rprop = new ResilientPropagation(network, training);
    for (var i = 0; i < 5; i++)
    {
        rprop.Iteration();
    }
    Assert.IsTrue(CompareArray.Compare(ExpectedWeights1, network.Flat.Weights, 0.00001));

    for (var i = 0; i < 5; i++)
    {
        rprop.Iteration();
    }
    Assert.IsTrue(CompareArray.Compare(ExpectedWeights2, network.Flat.Weights, 0.00001));

    var e = network.CalculateError(training);
    Assert.AreEqual(0.0767386807494191, e, 0.00001);
}
public ResilientPropagation TrainNetwork(BasicNetwork network, BasicMLDataSet trainingData)
{
    var trainedNetwork = new ResilientPropagation(network, trainingData);
    var epoch = 0;
    do
    {
        trainedNetwork.Iteration();
        epoch++;
        Console.WriteLine("Epoch: {0}, Error: {1}", epoch, trainedNetwork.Error);
    } while (trainedNetwork.Error > 0.01);
    return trainedNetwork;
}
/// <summary>
/// Evaluate how many resilient propagation iterations the network can complete
/// in ten seconds. Each iteration runs all of the training pairs through the network.
/// </summary>
/// <param name="network">The network to train.</param>
/// <param name="training">The training data to use.</param>
/// <returns>The number of iterations completed in the allotted time.</returns>
public static int EvaluateTrain(BasicNetwork network, IMLDataSet training)
{
    // train the neural network
    IMLTrain train = new ResilientPropagation(network, training);

    int iterations = 0;
    var watch = new Stopwatch();
    watch.Start();
    while (watch.ElapsedMilliseconds < (10 * Milis))
    {
        iterations++;
        train.Iteration();
    }
    return iterations;
}
public double EvaluateMPROP(BasicNetwork network, IMLDataSet data)
{
    var train = new ResilientPropagation(network, data);

    long start = DateTime.Now.Ticks;
    Console.WriteLine(@"Training 20 Iterations with MPROP");
    for (int i = 1; i <= 20; i++)
    {
        train.Iteration();
        Console.WriteLine("Iteration #" + i + " Error:" + train.Error);
    }
    //train.FinishTraining();
    long stop = DateTime.Now.Ticks;

    // TotalSeconds gives the full elapsed time; Seconds would only return the
    // seconds component of the TimeSpan.
    double diff = new TimeSpan(stop - start).TotalSeconds;
    Console.WriteLine("MPROP Result:" + diff + " seconds.");
    Console.WriteLine("Final MPROP error: " + network.CalculateError(data));
    return diff;
}
public static int EvaluateTrain(BasicNetwork network, IMLDataSet training)
{
    IMLTrain train = new ResilientPropagation(network, training);

    int num = 0;
    Stopwatch stopwatch = new Stopwatch();
    stopwatch.Start();

    // 0x2710 = 10,000 ms: train for ten seconds and count the iterations.
    while (stopwatch.ElapsedMilliseconds < 0x2710L)
    {
        num++;
        train.Iteration();
    }
    return num;
}
static void Main(string[] args)
{
    double[][] XOR_Input =
    {
        new[] { 0.0, 0.0 },
        new[] { 1.0, 0.0 },
        new[] { 0.0, 1.0 },
        new[] { 1.0, 1.0 }
    };

    double[][] XOR_Ideal =
    {
        new[] { 0.0 },
        new[] { 1.0 },
        new[] { 1.0 },
        new[] { 0.0 }
    };

    var trainingSet = new BasicMLDataSet(XOR_Input, XOR_Ideal);
    BasicNetwork network = CreateNetwork();
    var train = new ResilientPropagation(network, trainingSet);

    int epoch = 1;
    do
    {
        train.Iteration();
        epoch++;
        Console.WriteLine("Iteration No :{0}, Error: {1}", epoch, train.Error);
    } while (train.Error > 0.001);

    foreach (var item in trainingSet)
    {
        var output = network.Compute(item.Input);
        Console.WriteLine("Input : {0}, {1} Ideal : {2} Actual : {3}", item.Input[0], item.Input[1], item.Ideal[0], output[0]);
    }

    Console.WriteLine("press any key to exit...");
    Console.ReadLine();
}
/// <summary>
/// Evaluate how many resilient propagation iterations the network can complete
/// in ten seconds. Each iteration runs all of the training pairs through the
/// network; the elapsed time is only checked every 256 iterations to keep the
/// timing overhead low.
/// </summary>
/// <param name="network">The network to train.</param>
/// <param name="training">The training data to use.</param>
/// <returns>The number of iterations completed in the allotted time.</returns>
public static int EvaluateTrain(BasicNetwork network, IMLDataSet training)
{
    // train the neural network
    IMLTrain train = new ResilientPropagation(network, training);

    int iterations = 0;
    const int milis10 = Milis * 10;
    var watch = new Stopwatch();
    watch.Start();
    while (true)
    {
        iterations++;
        train.Iteration();
        // Stop once the time budget is exhausted (checked every 256 iterations).
        if ((iterations & 0xff) == 0 && watch.ElapsedMilliseconds >= milis10)
            break;
    }
    return iterations;
}
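// Both timed EvaluateTrain variants above reference a Milis constant that is not
// shown. Judging by the ten-second budget described in their summaries, it is
// presumably the number of milliseconds in one second (an assumption):
public const int Milis = 1000;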
public double Run(List<int> topology, int iterations)
{
    _Network = new BasicNetwork();
    _Network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, _Features));
    foreach (int layer in topology)
    {
        _Network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, layer));
    }
    _Network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 1));
    _Network.Structure.FinalizeStructure();
    _Network.Reset();

    ITrain train = new ResilientPropagation(_Network, _TrainingSet);
    for (int i = 0; i < iterations; i++)
    {
        train.Iteration();
    }
    return train.Error;
}
public static int Evaluate(BasicNetwork network, IMLDataSet training)
{
    ResilientPropagation rprop = new ResilientPropagation(network, training);
    int iterations = 0;

    while (true)
    {
        rprop.Iteration();
        iterations++;
        if (rprop.Error < TARGET_ERROR)
        {
            return iterations;
        }
        // Give up if the error target is not reached within 1000 iterations.
        if (iterations > 1000)
        {
            return -1;
        }
    }
}
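// TARGET_ERROR in Evaluate above is not defined in this section; presumably a small
// error threshold along these lines (the value is an assumption):
public const double TARGET_ERROR = 0.01;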
private static void XORTest()
{
    double[][] XOR_Input =
    {
        new[] { 0.0, 0.0 },
        new[] { 1.0, 0.0 },
        new[] { 0.0, 1.0 },
        new[] { 1.0, 1.0 }
    };

    double[][] XOR_Ideal =
    {
        new[] { 0.0 },
        new[] { 1.0 },
        new[] { 1.0 },
        new[] { 0.0 }
    };

    var trainingSet = new BasicMLDataSet(XOR_Input, XOR_Ideal);
    var network = CreateNetwork();
    var train = new ResilientPropagation(network, trainingSet);

    int epoch = 1;
    do
    {
        train.Iteration();
        epoch++;
        Console.WriteLine($"Iteration No: {epoch}, Error: {train.Error}");
    } while (train.Error > 0.001);

    foreach (var item in trainingSet)
    {
        var output = network.Compute(item.Input);
        Console.WriteLine($"Input : {item.Input[0]}, {item.Input[1]}, Ideal: {item.Ideal[0]}, Actual : {output[0]}");
    }
}
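// The two console XOR examples above call a CreateNetwork() helper that is not shown.
// A minimal sketch, assuming the 2-3-1 sigmoid feedforward layout used by the other
// XOR samples in this section (the hidden-layer size is an assumption):
private static BasicNetwork CreateNetwork()
{
    var network = new BasicNetwork();
    network.AddLayer(new BasicLayer(null, true, 2));                      // input
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 3));   // hidden
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1))   // output
        ;
    network.Structure.FinalizeStructure();
    network.Reset();
    return network;
}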
private static void Main(string[] args)
{
    // create a neural network, without using a factory
    var network = new BasicNetwork();
    network.AddLayer(new BasicLayer(null, true, 2));
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 3));
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
    network.Structure.FinalizeStructure();
    network.Reset();

    // create training data
    IMLDataSet trainingSet = new BasicMLDataSet(XORInput, XORIdeal);

    // train the neural network
    IMLTrain train = new ResilientPropagation(network, trainingSet);

    int epoch = 1;
    do
    {
        train.Iteration();
        Console.WriteLine(@"Epoch #" + epoch + @" Error:" + train.Error);
        epoch++;
    } while (train.Error > 0.01);
    train.FinishTraining();

    // test the neural network
    Console.WriteLine(@"Neural Network Results:");
    foreach (IMLDataPair pair in trainingSet)
    {
        IMLData output = network.Compute(pair.Input);
        Console.WriteLine(pair.Input[0] + @"," + pair.Input[1] + @", actual=" + output[0] + @",ideal=" + pair.Ideal[0]);
    }

    EncogFramework.Instance.Shutdown();
}
public void Perform(int thread)
{
    var stopwatch = new Stopwatch();
    stopwatch.Start();

    var network = new BasicNetwork();
    network.AddLayer(new BasicLayer(INPUT_COUNT));
    network.AddLayer(new BasicLayer(HIDDEN_COUNT));
    network.AddLayer(new BasicLayer(OUTPUT_COUNT));
    network.Structure.FinalizeStructure();
    network.Reset();

    IMLDataSet training = RandomTrainingFactory.Generate(1000, 50000, INPUT_COUNT, OUTPUT_COUNT, -1, 1);

    var rprop = new ResilientPropagation(network, training);
    rprop.ThreadCount = thread;
    for (int i = 0; i < 5; i++)
    {
        rprop.Iteration();
    }

    stopwatch.Stop();
    Console.WriteLine("Result with " + thread + " was " + stopwatch.ElapsedMilliseconds + "ms");
}
void Train()
{
    if (Memory.Count > 0)
    {
        network.Reset();

        // prepare the training data for the network
        double[][] InputData = new double[Memory.Count][];
        double[][] SenseData = new double[Memory.Count][];
        for (int i = 0; i < Memory.Count; i++)
        {
            InputData[i] = Memory[i];
            SenseData[i] = MemorySense[i];
        }

        IMLDataSet trainingSet = new BasicMLDataSet(InputData, SenseData);
        IMLTrain train = new ResilientPropagation(network, trainingSet);

        int epoch = 1;
        double old = 9999;
        double d = 999;
        do
        {
            train.Iteration();
            // print the current training status
            //Console.SetCursorPosition(0, 0);
            //Console.Write(@"Epoch #" + epoch + @" Error:" + train.Error);
            epoch++;
            d = Math.Abs(old - train.Error);
            old = train.Error;
        } while (train.Error > 0.0001 && epoch < 3000 && d > 0.00001);
        train.FinishTraining();

        // compute the total error after training
        //double sumd = 0.0;
        //foreach (IMLDataPair pair in trainingSet)
        //{
        //    IMLData output = network.Compute(pair.Input);
        //    sumd = sumd + Math.Abs(pair.Ideal[0] - output[0]);
        //    sumd = sumd / trainingSet.InputSize;
        //}
    }
}
public void Train(BasicNetwork network, IMLDataSet training)
{
    ITrain train = new ResilientPropagation(network, training);
    int epoch = 1;
    do
    {
        train.Iteration();
        Console.WriteLine(@"Epoch #" + epoch + @" Error:" + train.Error);
        epoch++;
    } while (train.Error > MaxError);
}
public void Train()
{
    TrainingErrorData.Clear();
    TestingIdealData.Clear();
    TestingResultsData.Clear();

    _network = ConstructNetwork(TrainingSet.InputSize, TrainingSet.IdealSize);

    //var trainer = new Backpropagation(_network, TrainingSet, LearningRate, Momentum);
    var trainer = new ResilientPropagation(_network, TrainingSet);

    double[] resultsArray = new double[TrainingSet.Count];
    double[] errorArray = new double[NumberOfIterations];

    IsBusy = true;
    for (int iteration = 0; iteration < NumberOfIterations; iteration++)
    {
        trainer.Iteration();
        TrainingErrorData.Add(new Tuple<int, double>(iteration, trainer.Error));
    }
    IsBusy = false;

    for (int i = 0; i < TrainingSet.Count; i++)
    {
        resultsArray[i] = _network.Classify(TrainingSet[i].Input);
    }

    TrainingErrorValue = _network.CalculateError(TrainingSet);
    Stage = Stage.Trained;
}
public void Train(NetData traindata, NetData testdata)
{
    bool stop;
    double sr;
    double bestsr;
    string now;
    int totalepoch;
    int epoch;
    long timeId;
    string netfile;

    timeId = DateTime.Now.Millisecond + DateTime.Now.Year + DateTime.Now.Minute + DateTime.Now.Hour + DateTime.Now.Day;

    // create training data
    IMLDataSet trainingSet = new BasicMLDataSet(traindata.Data, traindata.Targets);

    // create testing data
    IMLDataSet testingSet = new BasicMLDataSet(testdata.Data, testdata.Targets);

    // train the neural network
    ResilientPropagation train = new ResilientPropagation(network, trainingSet);
    train.RType = rproptype;

    stop = false;
    bestsr = 0;
    totalepoch = 0;
    epoch = 10;
    int i = 0;
    now = DateTime.Now.ToString().Replace(":", "_").Replace("/", "_");
    Console.WriteLine("Begin train. Inputs : " + testdata.ColCount + " Nodes : " + (network.Flat.NeuronCount - testdata.ColCount));

    do
    {
        // run ten iterations at a time
        train.Iteration(epoch);
        totalepoch = totalepoch + epoch;
        Console.WriteLine("total epoch " + totalepoch);

        // score the network against the test set
        i = 0;
        foreach (IMLDataPair pair in testingSet)
        {
            IMLData output = network.Compute(pair.Input);
            testdata.Targets[i][0] = output[0];
            i++;
        }

        sr = testdata.CalcTestResult();
        if (sr > bestsr)
        {
            // save the network whenever the test result improves
            bestsr = sr;
            netfile = "E:/Users/Brian/netfiles/encog/Date_" + now + "_eph_" + totalepoch + "_v" + timeId++ + ".net";
            testdata.epochs = totalepoch;
            Save(netfile);
            Console.WriteLine(netfile);
        }
        else if (sr < bestsr - .005)
        {
            // stop once the test result degrades noticeably
            stop = true;
        }
        //Console.WriteLine("Epoch Error:" + train.Error);
    } while (stop == false);
}
public void Train(IMLDataSet training)
{
    ITrain train = new ResilientPropagation(network, training);
    //SVDTraining train = new SVDTraining(network, training);

    int epoch = 1;
    do
    {
        train.Iteration();
        // print a progress dot every iterations/10 epochs
        if (epoch % (iterations / 10) == 0)
            Console.Write(".");
        epoch++;
    } while (epoch < iterations * 100);
}
/// <summary>
/// Trains the neural network down to a 1% error rate.
/// </summary>
private static void TrainNetwork()
{
    var network = (BasicNetwork)EncogDirectoryPersistence.LoadObject(Config.TrainedNetworkClassificationFile);
    var trainingSet = EncogUtility.LoadCSV2Memory(Config.NormalizedTrainingClassificationFile.ToString(),
        network.InputCount, network.OutputCount, true, CSVFormat.English, false);

    var train = new ResilientPropagation(network, trainingSet);

    int epoch = 1;
    do
    {
        train.Iteration();
        Console.WriteLine("Epoch : {0} Error : {1}", epoch, train.Error);
        epoch++;
    } while (train.Error > 0.01);

    EncogDirectoryPersistence.SaveObject(Config.TrainedNetworkClassificationFile, network);
}
/// <summary>
/// Evaluate one network.
/// </summary>
/// <param name="context">The job context.</param>
public override void PerformJobUnit(JobUnitContext context)
{
    BasicNetwork network = (BasicNetwork)context.JobUnit;

    // train the neural network
    ITrain train = new ResilientPropagation(network, this.training);
    for (int i = 0; i < this.iterations; i++)
    {
        train.Iteration();
    }

    double error = train.Error;
    if ((error < this.bestResult) || (this.bestNetwork == null))
    {
#if logging
        if (this.logger.IsDebugEnabled)
        {
            this.logger.Debug("Prune found new best network: error=" + error + ", network=" + network);
        }
#endif
        this.bestNetwork = network;
        this.bestResult = error;
    }
    this.currentTry++;

    this.ReportStatus(context, "Current: " + PruneIncremental.NetworkToString(network)
        + ", Best: " + PruneIncremental.NetworkToString(this.bestNetwork));
}
public void Train(Network network, TrainingCallback callback)
{
    IActivationFunction activationFunctionInput = network.GetActivation(0);
    int outputNeurons = network.GetLayerNeuronCount(network.LayerCount - 1);
    double error = 0;

    // First operation is filling standard inputs/outputs
    callback.Invoke(TrainingStatus.FillingStandardInputs, 0, 0, 0);
    Dictionary<int, List<BasicMLData>> trackIdFingerprints = GetNormalizedTrackFingerprints(activationFunctionInput, trainingSongSnippets, outputNeurons);
    workingThread = Thread.CurrentThread;
    IActivationFunction activationFunctionOutput = network.GetActivation(network.LayerCount - 1);
    double[][] normalizedBinaryCodes = GetNormalizedBinaryCodes(activationFunctionOutput, outputNeurons);
    Tuple<double[][], double[][]> tuple = FillStandardInputsOutputs(trackIdFingerprints, normalizedBinaryCodes);
    double[][] inputs = tuple.Item1;
    double[][] outputs = tuple.Item2;
    if (inputs == null || outputs == null)
    {
        callback.Invoke(TrainingStatus.Exception, 0, 0, 0);
        return;
    }

    int currentIteration = 0;
    double correctOutputs = 0.0;
    BasicNeuralDataSet dataset = new BasicNeuralDataSet(inputs, outputs);
    ITrain learner = new ResilientPropagation(network, dataset);
    try
    {
        // Dynamic output reordering cycle
        for (int i = 0; i < Idyn; i++) /*Idyn = 50*/
        {
            if (paused)
            {
                pauseSem.WaitOne();
            }

            correctOutputs = NetworkPerformanceMeter.MeasurePerformance(network, dataset);
            callback.Invoke(TrainingStatus.OutputReordering, correctOutputs, error, currentIteration);
            ReorderOutput(network, dataset, trackIdFingerprints, normalizedBinaryCodes);

            for (int j = 0; j < Edyn; j++) /*Edyn = 10*/
            {
                if (paused)
                {
                    pauseSem.WaitOne();
                }

                correctOutputs = NetworkPerformanceMeter.MeasurePerformance(network, dataset);
                callback.Invoke(TrainingStatus.RunningDynamicEpoch, correctOutputs, error, currentIteration);
                learner.Iteration();
                error = learner.Error;
                currentIteration++;
            }
        }

        // Fixed training epochs
        for (int i = 0; i < Efixed; i++)
        {
            if (paused)
            {
                pauseSem.WaitOne();
            }

            correctOutputs = NetworkPerformanceMeter.MeasurePerformance(network, dataset);
            callback.Invoke(TrainingStatus.FixedTraining, correctOutputs, error, currentIteration);
            learner.Iteration();
            error = learner.Error;
            currentIteration++;
        }

        network.ComputeMedianResponses(inputs, trainingSongSnippets);
        callback.Invoke(TrainingStatus.Finished, correctOutputs, error, currentIteration);
    }
    catch (ThreadAbortException)
    {
        callback.Invoke(TrainingStatus.Aborted, correctOutputs, error, currentIteration);
        paused = false;
    }
}
/// <summary>
/// Perform an individual job unit, which is a single network to train and
/// evaluate.
/// </summary>
/// <param name="context">Contains information about the job unit.</param>
public override sealed void PerformJobUnit(JobUnitContext context)
{
    var network = (BasicNetwork) context.JobUnit;
    BufferedMLDataSet buffer = null;
    IMLDataSet useTraining = _training;

    if (_training is BufferedMLDataSet)
    {
        buffer = (BufferedMLDataSet) _training;
        useTraining = (buffer.OpenAdditional());
    }

    // train the neural network
    double error = Double.PositiveInfinity;
    for (int z = 0; z < _weightTries; z++)
    {
        network.Reset();
        Propagation train = new ResilientPropagation(network, useTraining);
        var strat = new StopTrainingStrategy(0.001d, 5);

        train.AddStrategy(strat);
        train.ThreadCount = 1; // force single thread mode

        for (int i = 0; (i < _iterations) && !ShouldStop && !strat.ShouldStop(); i++)
        {
            train.Iteration();
        }

        error = Math.Min(error, train.Error);
    }

    if (buffer != null)
    {
        buffer.Close();
    }

    if (!ShouldStop)
    {
        // update min and max
        _high = Math.Max(_high, error);
        _low = Math.Min(_low, error);

        if (_hidden1Size > 0)
        {
            int networkHidden1Count;
            int networkHidden2Count;

            if (network.LayerCount > 3)
            {
                networkHidden2Count = network.GetLayerNeuronCount(2);
                networkHidden1Count = network.GetLayerNeuronCount(1);
            }
            else
            {
                networkHidden2Count = 0;
                networkHidden1Count = network.GetLayerNeuronCount(1);
            }

            int row, col;

            if (_hidden2Size == 0)
            {
                row = networkHidden1Count - _hidden[0].Min;
                col = 0;
            }
            else
            {
                row = networkHidden1Count - _hidden[0].Min;
                col = networkHidden2Count - _hidden[1].Min;
            }

            if ((row < 0) || (col < 0))
            {
                Console.Out.WriteLine("STOP");
            }
            _results[row][col] = error;
        }

        // report status
        _currentTry++;

        UpdateBest(network, error);
        ReportStatus(
            context,
            "Current: " + NetworkToString(network)
            + "; Best: " + NetworkToString(_bestNetwork));
    }
}
public List<double[]> Learn(double[][] data, double[][] ideal)
{
    double[][] origData = (double[][])data.Clone();
    int n = data.Length;
    int m = data[0].Length;
    double[][] output = new double[n][];
    double[][] sgmNeighbours = new double[n][];

    // split the segment-neighbour features off of each row
    for (var i = 0; i < n; i++)
    {
        double[] sgmN = new double[SegmentationData.SEGMENT_NEIGHBOURS];
        Array.Copy(data[i], m - SegmentationData.SEGMENT_NEIGHBOURS, sgmN, 0, SegmentationData.SEGMENT_NEIGHBOURS);
        sgmNeighbours[i] = sgmN;
        data[i] = data[i].Take(m - SegmentationData.SEGMENT_NEIGHBOURS).ToArray();
        output[i] = new double[m - SegmentationData.SEGMENT_NEIGHBOURS];
        data[i].CopyTo(output[i], 0);
    }

    IMLDataSet trainingSet = new BasicMLDataSet(data, output);

    // train the first single-layer autoencoder
    int inputLayerSize = layersConfiguration[0] - SegmentationData.SEGMENT_NEIGHBOURS;
    int trainingLayerSize = layersConfiguration[1];
    BasicNetwork oneLayerAutoencoder = new BasicNetwork();
    oneLayerAutoencoder.AddLayer(new BasicLayer(null, BIAS, inputLayerSize));
    oneLayerAutoencoder.AddLayer(new BasicLayer(CurrentActivationFunction(), BIAS, trainingLayerSize));
    oneLayerAutoencoder.AddLayer(new BasicLayer(CurrentActivationFunction(), false, inputLayerSize));
    oneLayerAutoencoder.Structure.FinalizeStructure();
    oneLayerAutoencoder.Reset();

    IMLTrain train = new ResilientPropagation(oneLayerAutoencoder, trainingSet);
    //IMLTrain train = new Backpropagation(oneLayerAutoencoder, trainingSet, LEARNING_RATE, MOMENTUM);

    int epoch = 1;
    List<double[]> errors = new List<double[]>();
    double[] trainError = new double[AUTOENCODER_MAX_ITER];
    do
    {
        train.Iteration();
        ActiveForm.Text = @"Epoch #" + epoch + @" Error:" + train.Error;
        Console.WriteLine(@"Epoch #" + epoch + @" Error:" + train.Error);
        trainError[epoch - 1] = train.Error;
        epoch++;
    } while (train.Error > EPS && epoch < AUTOENCODER_MAX_ITER);
    errors.Add(trainError);
    train.FinishTraining();

    BasicNetwork encoder = new BasicNetwork();
    encoder.AddLayer(new BasicLayer(null, BIAS, oneLayerAutoencoder.GetLayerNeuronCount(0)));
    encoder.AddLayer(new BasicLayer(CurrentActivationFunction(), false, oneLayerAutoencoder.GetLayerNeuronCount(1)));
    encoder.Structure.FinalizeStructure();
    encoder.Reset();

    // copy the trained weights into the encoder
    for (int i = 0; i < encoder.LayerCount - 1; i++)
        for (int f = 0; f < encoder.GetLayerNeuronCount(i); f++)
            for (int t = 0; t < encoder.GetLayerNeuronCount(i + 1); t++)
                encoder.SetWeight(i, f, t, oneLayerAutoencoder.GetWeight(i, f, t));
    //Compare2Networks(oneLayerAutoencoder, encoder);

    // greedily train one additional autoencoder layer at a time
    for (int l = 1; l < layersConfiguration.Count - 2; l++)
    {
        inputLayerSize = layersConfiguration[l];
        trainingLayerSize = layersConfiguration[l + 1];
        oneLayerAutoencoder = new BasicNetwork();
        oneLayerAutoencoder.AddLayer(new BasicLayer(null, BIAS, inputLayerSize));
        oneLayerAutoencoder.AddLayer(new BasicLayer(CurrentActivationFunction(), BIAS, trainingLayerSize));
        oneLayerAutoencoder.AddLayer(new BasicLayer(CurrentActivationFunction(), false, inputLayerSize));
        oneLayerAutoencoder.Structure.FinalizeStructure();
        oneLayerAutoencoder.Reset();

        // compute the output of the encoder built so far
        double[][] input = new double[n][];
        double[][] newOutput = new double[n][];
        for (int ni = 0; ni < n; ni++)
        {
            IMLData res = encoder.Compute(new BasicMLData(data[ni]));
            double[] resD = new double[res.Count];
            for (int i = 0; i < res.Count; i++)
                resD[i] = res[i];
            input[ni] = resD;
            newOutput[ni] = new double[res.Count];
            input[ni].CopyTo(newOutput[ni], 0);
        }

        BasicMLDataSet newTrainingSet = new BasicMLDataSet(input, newOutput);
        train = new ResilientPropagation(oneLayerAutoencoder, newTrainingSet);
        //train = new Backpropagation(oneLayerAutoencoder, newTrainingSet, LEARNING_RATE, MOMENTUM);
        epoch = 1;
        trainError = new double[AUTOENCODER_MAX_ITER];
        do
        {
            train.Iteration();
            ActiveForm.Text = @"Epoch #" + epoch + @" Error:" + train.Error;
            Console.WriteLine(@"Epoch #" + epoch + @" Error:" + train.Error);
            trainError[epoch - 1] = train.Error;
            epoch++;
        } while (train.Error > EPS && epoch < AUTOENCODER_MAX_ITER);
        errors.Add(trainError);
        train.FinishTraining();

        // extend the encoder with the newly trained layer
        BasicNetwork extendedEncoder = new BasicNetwork();
        extendedEncoder.AddLayer(new BasicLayer(null, BIAS, encoder.GetLayerNeuronCount(0)));
        for (int el = 1; el < encoder.LayerCount; el++)
            extendedEncoder.AddLayer(new BasicLayer(CurrentActivationFunction(), BIAS, encoder.GetLayerNeuronCount(el)));
        extendedEncoder.AddLayer(new BasicLayer(CurrentActivationFunction(), false, oneLayerAutoencoder.GetLayerNeuronCount(1)));
        extendedEncoder.Structure.FinalizeStructure();

        // copy the weights into the extended encoder
        for (int i = 0; i < extendedEncoder.LayerCount - 1; i++)
        {
            if (i < encoder.LayerCount - 1)
            {
                for (int f = 0; f < extendedEncoder.GetLayerNeuronCount(i); f++)
                    for (int t = 0; t < extendedEncoder.GetLayerNeuronCount(i + 1); t++)
                        extendedEncoder.SetWeight(i, f, t, encoder.GetWeight(i, f, t));
            }
            else
            {
                for (int f = 0; f < extendedEncoder.GetLayerNeuronCount(i); f++)
                    for (int t = 0; t < extendedEncoder.GetLayerNeuronCount(i + 1); t++)
                        extendedEncoder.SetWeight(i, f, t, oneLayerAutoencoder.GetWeight(0, f, t));
            }
        }
        encoder = extendedEncoder;
    }

    // build the structure of the final network
    network = new BasicNetwork();
    network.AddLayer(new BasicLayer(null, BIAS, encoder.GetLayerNeuronCount(0) + SegmentationData.SEGMENT_NEIGHBOURS));
    for (int el = 1; el < encoder.LayerCount; el++)
        network.AddLayer(new BasicLayer(CurrentActivationFunction(), BIAS, encoder.GetLayerNeuronCount(el) + SegmentationData.SEGMENT_NEIGHBOURS));
    network.AddLayer(new BasicLayer(CurrentActivationFunction(), false, layersConfiguration[layersConfiguration.Count - 1]));
    network.Structure.FinalizeStructure();
    network.Reset();

    /*
    for (int i = 0; i < encoder.LayerCount - 1; i++)
        for (int f = 0; f < encoder.GetLayerNeuronCount(i); f++)
            for (int t = 0; t < encoder.GetLayerNeuronCount(i + 1); t++)
                network.SetWeight(i, f, t, encoder.GetWeight(i, f, t));
    */

    // copy the encoder weights; cross-connections to the extra neighbour neurons
    // get weight 0, and each neighbour neuron passes itself through with weight 1
    for (int i = 0; i < encoder.LayerCount - 1; i++)
        for (int f = 0; f < network.GetLayerNeuronCount(i); f++)
            for (int t = 0; t < network.GetLayerNeuronCount(i + 1); t++)
            {
                if (f < encoder.GetLayerNeuronCount(i) && t >= encoder.GetLayerNeuronCount(i + 1))
                    network.SetWeight(i, f, t, 0);
                else if (f >= encoder.GetLayerNeuronCount(i) && t < encoder.GetLayerNeuronCount(i + 1))
                    network.SetWeight(i, f, t, 0);
                else if (f >= encoder.GetLayerNeuronCount(i) && t >= encoder.GetLayerNeuronCount(i + 1))
                    network.SetWeight(i, f, t, 1);
                else
                    network.SetWeight(i, f, t, encoder.GetWeight(i, f, t));
            }

    // train the final network
    trainingSet = new BasicMLDataSet(origData, ideal);
    train = new ResilientPropagation(network, trainingSet);
    //train = new Backpropagation(network, trainingSet, LEARNING_RATE, MOMENTUM);
    epoch = 1;
    trainError = new double[FINAL_NETWORK_MAX_ITER];
    do
    {
        train.Iteration();
        ActiveForm.Text = @"Epoch #" + epoch + @" Error:" + train.Error;
        Console.WriteLine(@"Epoch #" + epoch + @" Error:" + train.Error);
        trainError[epoch - 1] = train.Error;
        epoch++;
    } while (train.Error > EPS && epoch < FINAL_NETWORK_MAX_ITER);
    errors.Add(trainError);
    train.FinishTraining();

    try
    {
        string networkFileName = "autoencoder wo cmp 300 125 50 3";
        EncogDirectoryPersistence.SaveObject(new FileInfo(networkFileName), network);
        MessageBox.Show("NETWORK SAVED TO FILE " + networkFileName);
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
    }

    return errors;
}
public int Train(DataSet dataSet)
{
    Network = new BasicNetwork();
    Network.AddLayer(new BasicLayer(null, true, 8 * 21));
    var first = (8 * 21 + 4) * FirstLayerParameter;
    Network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, (int)first));
    var second = (8 * 21 + 4) * SecondLayerParameter;
    Network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, (int)second));
    Network.AddLayer(new BasicLayer(null, false, 1));
    Network.Structure.FinalizeStructure();
    Network.Reset();

    // genuine signatures are labeled 1, forgeries 0
    var set = new double[dataSet.Signatures.Count + dataSet.Forgeries.Count][];
    var ideal = new double[dataSet.Signatures.Count + dataSet.Forgeries.Count][];
    for (int i = 0; i < dataSet.Signatures.Count; i++)
    {
        set[i] = dataSet.Signatures[i].Data.Cast<double>().ToArray();
        ideal[i] = new double[] { 1 };
    }
    for (int i = dataSet.Signatures.Count; i < dataSet.Signatures.Count + dataSet.Forgeries.Count; i++)
    {
        set[i] = dataSet.Forgeries[i - dataSet.Signatures.Count].Data.Cast<double>().ToArray();
        ideal[i] = new double[] { 0 };
    }

    IMLDataSet trainingSet = new BasicMLDataSet(set, ideal);
    IMLTrain train = new ResilientPropagation(Network, trainingSet);

    int epoch = 1;
    var errors = new List<double>();
    do
    {
        train.Iteration();
        //Console.WriteLine(@"Epoch #" + epoch + @" Error:" + train.Error);
        epoch++;
        errors.Add(train.Error);
    } while (epoch < 10000);
    train.FinishTraining();
    return 1;
}
/// <summary>
/// This is based off of this article:
/// http://www.codeproject.com/Articles/54575/An-Introduction-to-Encog-Neural-Networks-for-C
/// </summary>
/// <remarks>
/// Go here for documentation of encog:
/// http://www.heatonresearch.com/wiki
///
/// Download link:
/// https://github.com/encog/encog-dotnet-core/releases
/// </remarks>
private void btnXOR_Click(object sender, RoutedEventArgs e)
{
    try
    {
        _trainingData = null;
        _results = null;

        BasicNetwork network = new BasicNetwork();

        #region Create nodes

        // Create the network's nodes

        //NOTE: Using ActivationSigmoid, because there are no negative values. If there were negative, use ActivationTANH
        //http://www.heatonresearch.com/wiki/Activation_Function

        //NOTE: ActivationSigmoid (0 to 1) and ActivationTANH (-1 to 1) are pure but slower. A cruder but faster function is ActivationElliott (0 to 1) and ActivationElliottSymmetric (-1 to 1)
        //http://www.heatonresearch.com/wiki/Elliott_Activation_Function
        network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 2));     // input layer
        network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 6));     // hidden layer
        network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 1));     // output layer
        network.Structure.FinalizeStructure();

        // Randomize the links
        network.Reset();

        #endregion

        #region Training data

        // Neural networks must be trained before they are of any use. To train this neural network, we must provide training
        // data. The training data is the truth table for the XOR operator. The XOR has the following inputs:
        double[][] xor_input = new[]
        {
            new[] { 0d, 0d },
            new[] { 1d, 0d },
            new[] { 0d, 1d },
            new[] { 1d, 1d },
        };

        // And the expected outputs
        double[][] xor_ideal_output = new[]
        {
            new[] { 0d },
            new[] { 1d },
            new[] { 1d },
            new[] { 0d },
        };

        _trainingData = GetDrawDataFromTrainingData(xor_input, xor_ideal_output);

        #endregion

        #region Train network

        INeuralDataSet trainingSet = new BasicNeuralDataSet(xor_input, xor_ideal_output);

        // This is a good general purpose training algorithm
        //http://www.heatonresearch.com/wiki/Training
        ITrain train = new ResilientPropagation(network, trainingSet);

        List<double> log = new List<double>();

        int trainingIteration = 1;
        do
        {
            train.Iteration();
            log.Add(train.Error);
            trainingIteration++;
        } while ((trainingIteration < 2000) && (train.Error > 0.001));

        // Paste this into excel and chart it to see the error trend
        string logExcel = string.Join("\r\n", log);

        #endregion

        #region Test

        //NOTE: I initially ran a bunch of tests, but the network always returns exactly the same result when given the same inputs
        //var test = Enumerable.Range(0, 1000).
        //    Select(o => new { In1 = _rand.Next(2), In2 = _rand.Next(2) }).

        var test = xor_input.
            Select(o => new { In1 = Convert.ToInt32(o[0]), In2 = Convert.ToInt32(o[1]) }).
            Select(o => new
            {
                o.In1,
                o.In2,
                Expected = XOR(o.In1, o.In2),
                NN = CallNN(network, o.In1, o.In2),
            }).
            Select(o => new { o.In1, o.In2, o.Expected, o.NN, Error = Math.Abs(o.Expected - o.NN) }).
            OrderByDescending(o => o.Error).
            ToArray();

        #endregion

        #region Test intermediate values

        // It was only trained with inputs of 0 and 1. Let's see what it does with values in between
        var intermediates = Enumerable.Range(0, 1000).
            Select(o => new { In1 = _rand.NextDouble(), In2 = _rand.NextDouble() }).
            Select(o => new
            {
                o.In1,
                o.In2,
                NN = CallNN(network, o.In1, o.In2),
            }).
            OrderBy(o => o.In1).
            ThenBy(o => o.In2).
            //OrderBy(o => o.NN).
            ToArray();

        #endregion

        #region Serialize/Deserialize

        // Serialize it
        string weightDump = network.DumpWeights();

        double[] dumpArray = weightDump.Split(',').
            Select(o => double.Parse(o)).
            ToArray();

        //TODO: Shoot through the layers, and store in some custom structure that can be serialized, then walked through to rebuild on deserialize
        //string[] layerDump = network.Structure.Layers.
        //    Select(o => o.ToString()).
        //    ToArray();

        // Create a clone
        BasicNetwork clone = new BasicNetwork();
        clone.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 2));
        clone.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 6));
        clone.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 1));
        clone.Structure.FinalizeStructure();
        clone.DecodeFromArray(dumpArray);

        // Test the clone
        string cloneDump = clone.DumpWeights();
        bool isSame = weightDump == cloneDump;

        var cloneTests = xor_input.
            Select(o => new
            {
                Input = o,
                NN = CallNN(clone, o[0], o[1]),
            }).ToArray();

        #endregion

        #region Store results

        double[] matchValues = new[] { 0d, 1d };
        double matchRange = .03;    //+- 3% of target value would be considered a match

        _results = intermediates.
            Select(o => Tuple.Create(new Point(o.In1, o.In2), o.NN, IsMatch(o.NN, matchValues, matchRange))).
            ToArray();

        #endregion
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.ToString(), this.Title, MessageBoxButton.OK, MessageBoxImage.Error);
    }
    finally
    {
        RedrawResults();
    }
}
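// The handler above relies on a CallNN helper that is not shown. A minimal sketch,
// assuming it simply runs the two inputs through the network and returns the single
// output value (the name and signature are taken from the calls above):
private static double CallNN(BasicNetwork network, double in1, double in2)
{
    IMLData output = network.Compute(new BasicMLData(new[] { in1, in2 }));
    return output[0];
}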
public double Evaluate(BasicNetwork network, IMLDataSet training)
{
    var rprop = new ResilientPropagation(network, training);
    double startingError = network.CalculateError(training);
    for (int i = 0; i < ITERATIONS; i++)
    {
        rprop.Iteration();
    }
    double finalError = network.CalculateError(training);

    // return the improvement in error achieved by training
    return startingError - finalError;
}
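// Evaluate above references an ITERATIONS constant that is not defined in this
// section; presumably a fixed iteration budget such as (the value is an assumption):
public const int ITERATIONS = 10;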