Beispiel #1
0
        /* Builds the neural network described by the nHelp parameters and
         * wraps it in a backpropagation trainer.
         * (Original comment in Polish: "Funkcja z parametrow tworzy
         * odpowiednia siec neuronowa".)
         */
        public ITrain CreateNeuronNetwork(INeuralDataSet trainingSet)
        {
            BasicNetwork network = new BasicNetwork();

            // Input layer: 2 neurons for problem 0, otherwise a single input.
            if (nHelp.problem == 0)
            {
                network.AddLayer(new BasicLayer(ActivationFunction, nHelp.bias, 2));
            }
            else
            {
                network.AddLayer(new BasicLayer(ActivationFunction, nHelp.bias, 1));
            }

            // Hidden layers: nHelp.layers counts all layers, so subtract the
            // input and output layers.
            for (int i = 0; i < nHelp.layers - 2; i++)
            {
                network.AddLayer(new BasicLayer(ActivationFunction, nHelp.bias, nHelp.neurons));
            }

            // Output layer (no bias): 4 neurons for problem 0, otherwise 1
            // (presumably classification vs. regression -- TODO confirm).
            if (nHelp.problem == 0)
            {
                network.AddLayer(new BasicLayer(ActivationFunction, false, 4));
            }
            else
            {
                network.AddLayer(new BasicLayer(ActivationFunction, false, 1));
            }

            network.Structure.FinalizeStructure();
            network.Reset();
            ITrain train = new Backpropagation(network, trainingSet, nHelp.learning, nHelp.momentum);

            return(train);
        }
        /// <summary>
        /// Trains the wrapped Encog network on the supplied data with plain
        /// backpropagation, in rounds of 1000 iterations, until the error
        /// improvement between rounds drops below 1e-7.
        /// </summary>
        /// <param name="info">Training data (input and expected output arrays).</param>
        /// <param name="lr">Learning rate passed to the trainer.</param>
        /// <param name="mom">Momentum passed to the trainer.</param>
        /// <returns>The trainer's error after the final round.</returns>
        public override double Train(Data info, float lr, float mom)
        {
            IMLDataSet dataSet = new BasicMLDataSet(info.InputData, info.OutputData);

            // Backpropagation trainer: (network, dataset, learning rate, momentum).
            IMLTrain trainer       = new Backpropagation(EncogNetwork, dataSet, lr, mom);
            double   previousError = double.PositiveInfinity;

            do
            {
                // Remember the error from the previous round; the trainer
                // reports 0 before its first iteration, so skip that value.
                if (trainer.Error != 0)
                {
                    previousError = trainer.Error;
                }

                for (int iteration = 0; iteration < 1000; iteration++)
                {
                    trainer.Iteration();
                }
            } while (previousError - trainer.Error > 0.0000001);

            return trainer.Error;
        }
Beispiel #3
0
 /// <summary>
 /// Computes this layer's error deltas from the difference between its
 /// produced output signals and the expected output signals (this appears to
 /// be the output-layer case, since it calls GetDeltasOutputLayer).
 /// </summary>
 protected override void CalculateDeltasErrorsForThisLayer(LayerEventArgs nextLayerEventArgs)
 {
     // Compare this layer's output for the active dataset row against the
     // expected signals, using the activation function from the event args.
     _DeltasErrorsOfNeurons = Backpropagation.GetDeltasOutputLayer(
         _OutputSignals[(int)nextLayerEventArgs.NumberOfActiveDataset],
         nextLayerEventArgs.ExpectedSignalsOutLayer[(int)nextLayerEventArgs.NumberOfActiveDataset],
         nextLayerEventArgs.ActivationFunc);
 }
Beispiel #4
0
        /// <summary>
        /// Trains the network using Levenberg-Marquardt when
        /// <paramref name="Method"/> equals "Leven" and backpropagation
        /// otherwise, hybridised with simulated annealing, until the stop
        /// strategy fires.
        /// </summary>
        /// <param name="what">Label used in the progress log lines.</param>
        /// <returns>The final training error.</returns>
        private double TrainNetwork(String what, BasicNetwork network, IMLDataSet trainingSet, string Method)
        {
            // Secondary trainer used by the hybrid strategy.
            var      score     = new TrainingSetScore(trainingSet);
            IMLTrain annealing = new NeuralSimulatedAnnealing(network, score, 10, 2, 100);

            IMLTrain primary;
            if (Method.Equals("Leven"))
            {
                Console.WriteLine("Using LevenbergMarquardtTraining");
                primary = new LevenbergMarquardtTraining(network, trainingSet);
            }
            else
            {
                primary = new Backpropagation(network, trainingSet);
            }

            var stop = new StopTrainingStrategy();
            primary.AddStrategy(new Greedy());
            primary.AddStrategy(new HybridStrategy(annealing));
            primary.AddStrategy(stop);

            for (int epoch = 0; !stop.ShouldStop(); epoch++)
            {
                primary.Iteration();
                app.WriteLine("Training " + what + ", Epoch #" + epoch + " Error:" + primary.Error);
            }
            return primary.Error;
        }
Beispiel #5
0
    /// <summary>
    /// Recursively backpropagates negative-sample deltas from <paramref name="layer"/>
    /// through <paramref name="previousLayer"/> and every layer feeding it,
    /// updating node and bias weights along the way.
    /// </summary>
    private static void RecurseNegativeSample(Layer layer, Layer previousLayer, int inputIndex, Dictionary <Node, double> backwardsPassDeltas, double momentumMagnitude)
    {
        if (!previousLayer.PreviousLayers.Any())
        {
            // case where previous layer is input
            NegativeSampleFirstHiddenLayer(layer, previousLayer, inputIndex, backwardsPassDeltas, momentumMagnitude);
            return;
        }

        // Deltas computed here become the backwards-pass deltas for the
        // next (deeper) recursion step.
        var deltas = new Dictionary <Node, double>();

        foreach (var node in layer.Nodes)
        {
            var delta = Backpropagation.CalculateDelta(layer, backwardsPassDeltas, node);
            deltas.Add(node, delta);

            // Apply the delta to every incoming weight of the node...
            foreach (var(prevNode, weightForPrevNode) in node.Weights)
            {
                Backpropagation.UpdateNodeWeight(prevNode, weightForPrevNode, delta, momentumMagnitude);
            }

            // ...and to each of its bias weights.
            foreach (var(_, weightForPrevLayer) in node.BiasWeights)
            {
                Backpropagation.UpdateBiasNodeWeight(weightForPrevLayer, delta, momentumMagnitude);
            }
        }

        // Recurse into every layer that feeds the previous layer.
        foreach (var prevPrevLayer in previousLayer.PreviousLayers)
        {
            RecurseNegativeSample(previousLayer, prevPrevLayer, inputIndex, deltas, momentumMagnitude);
        }
    }
        /// <summary>
        /// Trains the network with backpropagation (lr 0.000001, momentum 0.1)
        /// until the error falls below MAX_ERROR, periodically falling back to
        /// simulated annealing when progress stalls.
        /// </summary>
        private void trainNetworkBackprop()
        {
            Train train = new Backpropagation(this.network, this.input,
                                              this.ideal, 0.000001, 0.1);
            double lastError  = Double.MaxValue;
            int    epoch      = 1;
            int    lastAnneal = 0;

            do
            {
                train.Iteration();
                double error = train.Error;

                Console.WriteLine("Iteration(Backprop) #" + epoch + " Error:"
                                  + error);

                // While the error is still high, anneal whenever at least 100
                // epochs have passed since the last anneal and the error is
                // rising or has effectively plateaued (< 0.0001 change).
                if (error > 0.05)
                {
                    if ((lastAnneal > 100) && (error > lastError || Math.Abs(error - lastError) < 0.0001))
                    {
                        trainNetworkAnneal();
                        lastAnneal = 0;
                    }
                }

                lastError = train.Error;
                epoch++;
                lastAnneal++;
            } while (train.Error > MAX_ERROR);
        }
Beispiel #7
0
        /// <summary>
        /// Demo entry point: builds a 1-6-1 feed-forward network with sigmoid
        /// activations, randomises its weights, trains it with backpropagation
        /// on "dummyData.txt" and prints network output vs. target per sample.
        /// </summary>
        static void Main(string[] args)
        {
            int[] layout = { 1, 6, 1 };
            ActivationFunction[] activationFunctions = { Sigmoid(), Sigmoid() };
            FFANN network = new FFANN(layout, activationFunctions);

            // Initialise every weight with a uniform random value in [0, 1).
            Random random      = new Random();
            int    weightCount = network.WeightCount();

            double[] weights = new double[weightCount];
            for (int w = 0; w < weightCount; ++w)
            {
                weights[w] = random.NextDouble();
            }
            network.SetWeights(weights);

            Dataset dataset = new Dataset();
            dataset.Read("dummyData.txt");
            Console.WriteLine(network.CalculateError(dataset));

            // Trainer arguments: (network, learning rate, dataset);
            // Train(1) presumably trains with batch size 1 (online).
            Backpropagation trainer = new Backpropagation(network, 0.1, dataset);
            trainer.MaxIteration = 100000;
            trainer.MaxError     = 1e-6;
            trainer.Train(1);

            for (int sample = 0; sample < dataset.Size; ++sample)
            {
                Console.WriteLine(network.GetOutput(dataset.GetInput(sample))[0].ToString("0.0000") + "  " + dataset.GetOutput(sample)[0]);
            }
        }
Beispiel #8
0
        /// <summary>
        /// Trains the network with backpropagation on a normalised CSV dataset
        /// until the error drops below 0.05, prints each pair's actual vs.
        /// ideal output, counts correct binary classifications and saves the
        /// trained network to networkFile.
        /// </summary>
        public void Train()
        {
            var        network     = createNetwork();
            // One ideal output column, no headers, English CSV format.
            IMLDataSet trainingSet = EncogUtility.LoadCSV2Memory(normFile, network.InputCount, 1, false, CSVFormat.English, false);

            IMLTrain train     = new Backpropagation(network, trainingSet);
            int      epoch     = 1;
            int      truecases = 0;

            do
            {
                train.Iteration();
                Console.WriteLine(@"Epoch #" + epoch + @" Error:" + train.Error);
                epoch++;
            } while (train.Error > 0.05);
            train.FinishTraining();

            Console.WriteLine(@"Neural Network Results:");
            foreach (IMLDataPair pair in trainingSet)
            {
                IMLData output = network.Compute(pair.Input);
                Console.WriteLine(@" actual=" + output[0] + @",ideal=" + pair.Ideal[0]);
                // Count a hit when the 0.5-thresholded output matches the 0/1
                // ideal; an output of exactly 0.5 counts as a miss.
                if (pair.Ideal[0] == 1 && output[0] > 0.5)
                {
                    truecases++;
                }
                else if (pair.Ideal[0] == 0 && output[0] < 0.5)
                {
                    truecases++;
                }
            }
            Console.WriteLine(truecases);
            SerializeObject.Save(networkFile, network);
        }
        /// <summary>
        /// Trains the network with a backpropagation/annealing/NEAT hybrid:
        /// the main backprop trainer (lr 0.001, no momentum) carries greedy,
        /// annealing-hybrid, NEAT-hybrid and stop strategies, while the NEAT
        /// trainer is also iterated directly each epoch.
        /// </summary>
        private void trainNetworkBackprop()
        {
            // IMLTrain train = new Backpropagation(this.network, this.input,this.ideal, 0.000001, 0.1);

            IMLDataSet aset  = new BasicMLDataSet(input, ideal);
            int        epoch = 1;
            // train the neural network
            ICalculateScore      score     = new TrainingSetScore(aset);
            IMLTrain             trainAlt  = new NeuralSimulatedAnnealing(network, score, 10, 2, 100);
            IMLTrain             trainMain = new Backpropagation(network, aset, 0.001, 0.0);
            StopTrainingStrategy stop      = new StopTrainingStrategy();
            var pop = new NEATPopulation(INPUT_SIZE, OUTPUT_SIZE, 1000);
            // train the neural network
            var step = new ActivationStep();

            // NEAT output neurons use a step activation centred at 0.5.
            step.Center = 0.5;
            pop.OutputActivationFunction = step;
            var train = new NEATTraining(score, pop);

            trainMain.AddStrategy(new Greedy());
            trainMain.AddStrategy(new HybridStrategy(trainAlt));
            trainMain.AddStrategy(stop);
            trainMain.AddStrategy(new HybridStrategy(train));


            network.ClearContext();

            // Loop until the stop strategy attached to trainMain fires.
            while (!stop.ShouldStop())
            {
                trainMain.Iteration();
                train.Iteration();
                Console.WriteLine(@"Training " + @"Epoch #" + epoch + @" Error:" + trainMain.Error + @" Genetic iteration:" + trainAlt.IterationNumber + @"neat iteration:" + train.IterationNumber);
                epoch++;
            }
        }
Beispiel #10
0
        /// <summary>
        /// Reads the MLP configuration from the UI controls, builds a sigmoid
        /// feed-forward network and trains it with backpropagation for the
        /// requested number of epochs.
        /// </summary>
        private void Train()
        {
            int        numOfLayers          = (int)MLNumOfLayersUpDown.Value;
            double     learningRate         = (double)MPLearningRateUpDown.Value;
            List <int> numOfNeuronsPerLayer = new List <int> ();

            int numOfEpocks = (int)MLNumOfEpocksUpDown.Value;
            LearningAlgorithm backpropagation = new Backpropagation();

            // Layer widths: the input layer matches the sample size, the last
            // layer is fixed at 3 outputs, hidden widths come from the grid.
            for (int i = 0; i < numOfLayers; ++i)
            {
                numOfNeuronsPerLayer.Add((i == 0) ? trainingSamples[0].Count : (i == numOfLayers - 1) ? 3 : int.Parse(NeuronsInHLayers[1, i - 1].Value.ToString()));
            }


            this.neuralNetwork = new FeedforwardNeuralNetwrok(numOfLayers);
            this.neuralNetwork.setNetwork(numOfNeuronsPerLayer);
            // Sigmoid activation on every layer except the input layer (index 0).
            for (int i = 1; i < numOfLayers; ++i)
            {
                this.neuralNetwork.setLayer(i, new SigmoidFunction());
            }



            for (int i = 0; i < numOfEpocks; ++i)
            {
                this.neuralNetwork.train(trainingSamples, trainingLabels, learningRate, backpropagation);
            }
        }
        /// <summary>
        /// Runs the trained network over every pair in the data set, collects
        /// its answers (arg-max class index for classification, raw value for
        /// regression) and returns the final error of the denormalised answers.
        /// </summary>
        /// <param name="network">Trainer whose underlying flat network is evaluated.</param>
        /// <param name="dataSet">Pairs to evaluate.</param>
        /// <param name="answersSize">Number of classification outputs; 0 means a single regression output.</param>
        /// <returns>The error computed by CalculateFinalError.</returns>
        public static double GetNetworkDataSetError(Backpropagation network, INeuralDataSet dataSet, int answersSize)
        {
            double[] neuralAnswer = new double[dataSet.Count];
            int      i            = 0;

            foreach (var pair in dataSet)
            {
                // BUG FIX: the original allocated new double[answersSize], so
                // the regression branch (answersSize == 0) always threw when
                // reading output[0]. Allocate at least one slot.
                double[] output = new double[answersSize != 0 ? answersSize : 1];
                network.Network.Flat.Compute(pair.Input, output);
                if (answersSize != 0)
                {
                    // Arg-max over the outputs. BUG FIX: start below any
                    // possible activation instead of 0.0 so rows whose outputs
                    // are all negative still select the largest one.
                    double best = double.NegativeInfinity;
                    for (int r = 0; r < answersSize; r++)
                    {
                        if (output[r] >= best)
                        {
                            neuralAnswer[i] = r;
                            best            = output[r];
                        }
                    }
                }
                else
                {
                    neuralAnswer[i] = output[0];
                }
                i++;
            }
            int[] answers = DenormaliseAnswers(neuralAnswer, answersSize);
            //Console.WriteLine("Neural Network Results");
            double calculateError = CalculateFinalError(answers, dataSet, answersSize);

            return(calculateError);
        }
Beispiel #12
0
        /// <summary>
        /// Builds a feed-forward network from the supplied configuration and
        /// wraps it in a backpropagation trainer.
        /// </summary>
        /// <param name="learningSet">Dataset the trainer will use.</param>
        /// <param name="inputSize">Number of neurons in the input layer.</param>
        /// <param name="inputData">Network configuration: hidden layout, bias flag, activation, learning parameters.</param>
        /// <returns>A configured backpropagation trainer for the new network.</returns>
        public static ITrain CreateNeuronNetwork(INeuralDataSet learningSet, int inputSize, InputClass inputData)
        {
            BasicNetwork network = new BasicNetwork();

            int[] hiddenWidths     = inputData.hiddenNeurons;
            int   hiddenLayerCount = inputData.hiddenLayers;
            bool  useBias          = inputData.bias;
            IActivationFunction activation = inputData.activationFunction;

            double learningRate = inputData.learningFactor;
            double momentum     = inputData.momentum;

            // Input layer, then the configured hidden layers, then a fixed
            // 4-neuron output layer without bias.
            network.AddLayer(new BasicLayer(activation, useBias, inputSize));
            for (int layer = 0; layer < hiddenLayerCount; layer++)
            {
                network.AddLayer(new BasicLayer(activation, useBias, hiddenWidths[layer]));
            }
            network.AddLayer(new BasicLayer(activation, false, 4));

            network.Structure.FinalizeStructure();
            network.Reset();

            return new Backpropagation(network, learningSet, learningRate, momentum);
        }
Beispiel #13
0
        /// <summary>
        /// Backward pass: routes the incoming gradient through an "un-argmax"
        /// so it flows only to the positions selected by Argmax along Axis.
        /// </summary>
        /// <param name="delta">Gradient arriving from downstream.</param>
        /// <param name="bp">Backpropagation context collecting gradients.</param>
        public override void Backward(Tensor <Type> delta, Backpropagation bp)
        {
            // Lazily compute and cache the argmax indices of x along Axis.
            argmax = argmax ?? Op.Argmax(x, Axis, keepDims: true);
            var deltaX = UnArgmax <Type> .Create(delta, argmax, Axis, x.Shape);

            bp.PushGradientTo(x, deltaX);
        }
        /// <summary>
        /// Trains the network with backpropagation (lr 0.0001, momentum 0.01)
        /// hybridised with simulated annealing, stopping via a strategy that
        /// watches for improvements below 0.0001 over 200 iterations.
        /// </summary>
        /// <returns>The final training error.</returns>
        public static double TrainNetworks(BasicNetwork network, IMLDataSet minis)
        {
            // train the neural network
            ICalculateScore      score     = new TrainingSetScore(minis);
            IMLTrain             trainAlt  = new NeuralSimulatedAnnealing(network, score, 10, 2, 100);
            IMLTrain             trainMain = new Backpropagation(network, minis, 0.0001, 0.01);
            StopTrainingStrategy stop      = new StopTrainingStrategy(0.0001, 200);

            trainMain.AddStrategy(new Greedy());
            trainMain.AddStrategy(new HybridStrategy(trainAlt));
            trainMain.AddStrategy(stop);


            // The stopwatch is only used to time the loop; its value is not returned.
            var sw = new Stopwatch();

            sw.Start();
            while (!stop.ShouldStop())
            {
                trainMain.Iteration();
                Console.WriteLine(@"Iteration #:" + trainMain.IterationNumber + @" Error:" + trainMain.Error + @" Genetic Iteration:" + trainAlt.IterationNumber);
            }
            sw.Stop();

            return(trainMain.Error);
        }
Beispiel #15
0
        /// <summary>
        /// Runs a forward pass for every training row of the trained network
        /// and returns the output-layer values as a rows-by-neurons matrix.
        /// Only BACKPROPAGATION-trained networks are supported.
        /// </summary>
        /// <exception cref="System.ArgumentException">If the train type is not handled.</exception>
        public virtual double[][] getNetOutputValues(NeuralNet trainedNet)
        {
            int rows = trainedNet.TrainSet.Length;

            int cols = trainedNet.OutputLayer.NumberOfNeuronsInLayer;

            double[][] matrixOutputValues = RectangularArrays.ReturnRectangularDoubleArray(rows, cols);

            switch (trainedNet.trainType)
            {
            case TrainingTypesENUM.BACKPROPAGATION:
                Backpropagation b = new Backpropagation();

                // NOTE(review): forward() is re-executed for every cell, i.e.
                // cols times per row -- looks redundant but kept as-is here.
                for (int rows_i = 0; rows_i < rows; rows_i++)
                {
                    for (int cols_i = 0; cols_i < cols; cols_i++)
                    {
                        matrixOutputValues[rows_i][cols_i] = b.forward(trainedNet, rows_i).OutputLayer.ListOfNeurons[cols_i].OutputValue;
                    }
                }

                break;

            default:
                throw new System.ArgumentException(trainedNet.trainType + " does not exist in TrainingTypesENUM");
            }

            return(matrixOutputValues);
        }
Beispiel #16
0
        /*
         * Restores a neural network's structure from a JSON file.
         * (Original comment in Portuguese: "Recupera a estrutura de uma RNA
         * a partir de um arquivo".)
         */
        public static INeuralNetwork NeuralNetworkStructure(string file)
        {
            string json = ReadFile(file);

            ADReNA_API.Util.ExportImportCommon.CommonStructure stru = JsonConvert.DeserializeObject <ADReNA_API.Util.ExportImportCommon.CommonStructure>(json);
            // NOTE(review): stays null when stru.type matches no case below,
            // so callers receive null for unknown network types.
            INeuralNetwork ann = null;

            switch (stru.type)
            {
            case ExportImportCommon.AnnType.Backpropagation:
                ann = new Backpropagation(stru.inputLayerSize.Value, stru.outputLayerSize.Value, stru.hiddenLayerSizes);
                ((Backpropagation)ann).SetErrorRate(stru.error.Value);
                ((Backpropagation)ann).SetMaxIterationNumber(stru.iterationNumber.Value);
                ((Backpropagation)ann).SetLearningRate(stru.learningRate.Value);
                break;

            case ExportImportCommon.AnnType.Kohonen:
                ann = new Kohonen(stru.inputLayerSize.Value, stru.competitiveNeuronLength.Value, stru.maximumWeightRange.Value);
                ((Kohonen)ann).SetIterationNumber(stru.iterationNumber.Value);
                ((Kohonen)ann).SetLearningRate(stru.learningRate.Value);
                ((Kohonen)ann).SetNeighborhoodRadius(stru.neighborhoodRadius.Value);
                break;
            }

            return(ann);
        }
Beispiel #17
0
        /// <summary>
        /// Builds a 2-2-1 sigmoid network and trains it on the XOR truth table
        /// for 8000 epochs of backpropagation (the same data serves as both
        /// training and validation set).
        /// </summary>
        private static void Main(string[] args)
        {
            NeuralNetwork network = NeuralNetworkBuilder.StartBuild()
                                    .SetInitMethod(InitializationFunction.Random)
                                    .CreateInputLayer(2)
                                    .AddHiddenLayer(2, new Sigmoid())
                                    .CreateOutputLayer(1, new Sigmoid())
                                    .Build(new Random());

            // XOR truth table: expected outputs for the matching input rows.
            double[][] xorTargets =
            {
                new double[] { 0 },
                new double[] { 1 },
                new double[] { 1 },
                new double[] { 0 }
            };
            double[][] xorInputs =
            {
                new double[] { 0, 0 },
                new double[] { 1, 0 },
                new double[] { 0, 1 },
                new double[] { 1, 1 }
            };

            double lastEpochError = 0;
            var    trainer        = new Backpropagation(network);

            while (trainer.EpochCount < 8000)
            {
                lastEpochError = trainer.TrainEpoch(xorInputs, xorTargets, xorInputs, xorTargets);
            }
        }
        /// <summary>
        /// Verifies that GetGradientsForWeights multiplies each input signal
        /// by the single output delta, yielding one gradient row per input.
        /// </summary>
        public void GetGradientsForWeights()
        {
            //Arrange
            float[] inputSignal = new float[]
            {
                0.95F, 0.55F, 0.90F
            };

            float[] deltasErrorThisLayer = new float[]
            {
                0.77F
            };

            // Expected: inputSignal[i] * 0.77F, including float rounding.
            float[][] expected = new float[][]
            {
                new float[] { 0.73149997F },
                new float[] { 0.4235F },
                new float[] { 0.692999959F }
            };

            //Act
            var actual = Backpropagation.GetGradientsForWeights(inputSignal, deltasErrorThisLayer);

            //Assert
            // Exact float equality is intentional: the expected values were
            // captured from the same single-precision computation.
            for (int i = 0; i < expected.Length; i++)
            {
                for (int j = 0; j < expected[i].Length; j++)
                {
                    Assert.AreEqual(expected[i][j], actual[i][j]);
                }
            }
        }
Beispiel #19
0
        /// <summary>
        /// Builds a sigmoid network (N_input / N_hidden / N_output), merges
        /// the second point set into the first and trains with backpropagation
        /// until the error drops to 0.001 or 20000 epochs have run.
        /// </summary>
        /// <param name="pointsConvertedA">Primary training points; B is merged into it.</param>
        /// <param name="pointsConvertedB">Optional extra points merged via Juntar.</param>
        /// <param name="debug">When true, logs the error each epoch.</param>
        public static void Train(PointsConverted pointsConvertedA, PointsConverted pointsConvertedB = null, bool debug = true)
        {
            network = new BasicNetwork();
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, N_input));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, N_hidden));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, N_output));
            network.Structure.FinalizeStructure();
            network.Reset();

            pointsConvertedA.Juntar(pointsConvertedB);

            var trainingSet = new BasicNeuralDataSet(pointsConvertedA.entrada, pointsConvertedA.saida);
            var train       = new Backpropagation(network, trainingSet);

            var epoch = 0;

            do
            {
                train.Iteration();

                if (debug)
                {
                    Console.WriteLine("Epoch #" + epoch + " Error:" + train.Error);
                }

                epoch++;
                // BUG FIX: the original condition used ||, which never stopped
                // before 20000 epochs and looped forever when the error stayed
                // above 0.001. With && the loop stops at either limit.
            }while ((epoch <= 20000) && (train.Error > 0.001));
        }
Beispiel #20
0
        /// <summary>
        /// Trains the network with backpropagation (lr 0.00001, no momentum),
        /// hybridised with simulated annealing, until the stop strategy fires.
        /// </summary>
        /// <param name="what">Label used in the progress log lines.</param>
        /// <returns>The final training error.</returns>
        private double TrainNetwork(String what, BasicNetwork network, IMLDataSet trainingSet)
        {
            // Secondary annealing trainer driven through the hybrid strategy.
            var      score     = new TrainingSetScore(trainingSet);
            IMLTrain annealing = new NeuralSimulatedAnnealing(
                network, score, 10, 2, 100);

            IMLTrain backprop = new Backpropagation(network, trainingSet, 0.00001, 0.0);

            var stop = new StopTrainingStrategy();
            backprop.AddStrategy(new Greedy());
            backprop.AddStrategy(new HybridStrategy(annealing));
            backprop.AddStrategy(stop);

            for (int epoch = 0; !stop.ShouldStop(); epoch++)
            {
                backprop.Iteration();
                app.WriteLine("Training " + what + ", Epoch #" + epoch + " Error:" + backprop.Error);
            }
            return backprop.Error;
        }
Beispiel #21
0
        /// <summary>
        /// Backward pass of a (possibly transposed) 2-D matrix product of x
        /// and y: the gradient for each operand is built as another product of
        /// delta with the other operand, with operand order and transpose
        /// flags adjusted according to TransposeX/TransposeY.
        /// </summary>
        /// <param name="delta">Gradient arriving from downstream.</param>
        /// <param name="bp">Backpropagation context collecting gradients.</param>
        /// <exception cref="NotImplementedException">If x or y has more than two dimensions.</exception>
        public override void Backward(Tensor <float> delta, Backpropagation bp)
        {
            Tensor <float> deltaX, deltaY;

            if (x.NDim > 2 || y.NDim > 2)
            {
                throw new NotImplementedException("Backward of tensor dot");
            }
            // Gradient w.r.t. x: delta combined with y.
            if (!TransposeX)
            {
                deltaX = Create(delta, y, false, !TransposeY);
            }
            else
            {
                deltaX = Create(y, delta, TransposeY, true);
            }
            bp.PushGradientTo(x, deltaX);

            // Gradient w.r.t. y: x combined with delta.
            if (!TransposeY)
            {
                deltaY = Create(x, delta, !TransposeX, false);
            }
            else
            {
                deltaY = Create(delta, x, true, TransposeX);
            }

            bp.PushGradientTo(y, deltaY);
        }
Beispiel #22
0
        /// <summary>
        /// Benchmarks Encog backpropagation: builds an input/hidden/output
        /// sigmoid network sized from the data, runs ITERATIONS training
        /// iterations and returns the elapsed wall-clock time.
        /// </summary>
        /// <returns>Elapsed milliseconds for the training loop.</returns>
        public static long BenchmarkEncog(double[][] input, double[][] output)
        {
            var network = new BasicNetwork();
            network.AddLayer(new BasicLayer(null, true, input[0].Length));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, HIDDEN_COUNT));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, output[0].Length));
            network.Structure.FinalizeStructure();
            network.Reset();

            IMLDataSet trainingSet = new BasicMLDataSet(input, output);

            // Trainer arguments: (network, dataset, learning rate, momentum).
            IMLTrain trainer = new Backpropagation(network, trainingSet, 0.7, 0.7);

            var stopwatch = Stopwatch.StartNew();
            for (int i = 0; i < ITERATIONS; i++)
            {
                trainer.Iteration();
            }
            stopwatch.Stop();

            return stopwatch.ElapsedMilliseconds;
        }
Beispiel #23
0
        /// <summary>
        /// Entry point: builds a sigmoid FFANN from the layout file (args[1]),
        /// trains it with backpropagation on the point dataset (args[0]) using
        /// the batch mode chosen by args[2] (1 = full batch, 2 = online,
        /// otherwise mini-batches of 20), then opens the recognition window.
        /// </summary>
        static void Main(string[] args)
        {
            Application.EnableVisualStyles();
            Application.SetCompatibleTextRenderingDefault(false);

            // NOTE(review): args is used unchecked; fewer than three arguments
            // will throw at runtime.
            int[] layout = ReadLayout(args[1]);
            var   activationFunctions = new ActivationFunction[layout.Length - 1];

            // Sigmoid activation on every non-input layer.
            for (int i = 0; i < layout.Length - 1; ++i)
            {
                activationFunctions[i] = Sigmoid();
            }

            FFANN ffann = new FFANN(layout, activationFunctions);

            Console.WriteLine(ffann.WeightCount());

            int n = ffann.WeightCount();

            // Random initial weights in [0, 1).
            double[] weights = new double[n];
            Random   rnd     = new Random();

            for (int i = 0; i < n; ++i)
            {
                weights[i] = rnd.NextDouble();
            }
            ffann.SetWeights(weights);

            // PointDataset takes layout[0] / 2 -- presumably the point count
            // per sample (two coordinates each); TODO confirm.
            Dataset dataset = new PointDataset(layout[0] / 2);

            dataset.Read(args[0]);
            Console.WriteLine(ffann.CalculateError(dataset));

            Backpropagation train = new Backpropagation(ffann, 0.2, dataset);

            train.MaxIteration = 5000;
            train.MaxError     = 1e-6;

            int batchSize;

            switch (args[2])
            {
            case "1":
                batchSize = dataset.Size;
                break;

            case "2":
                batchSize = 1;
                break;

            default:
                batchSize = 20;
                break;
            }
            train.Train(batchSize);

            Application.Run(new Recognition(ffann));
        }
Beispiel #24
0
        /// <summary>
        /// Asynchronously trains a network on the selected spectral images.
        /// Runs Idyn "dynamic" phases -- each measures performance, reorders
        /// the training outputs to the best-matching binary codes, then trains
        /// Edyn backprop epochs -- followed by Efixed epochs with fixed
        /// outputs. Progress is reported through <paramref name="callback"/>.
        /// </summary>
        /// <returns>A task producing the trained network.</returns>
        public Task <Network> Train(NetworkConfiguration networkConfiguration, int[] spectralImagesIndexesToConsider, TrainingCallback callback)
        {
            var network = networkFactory.Create(
                networkConfiguration.ActivationFunction,
                DefaultFingerprintSize,
                networkConfiguration.HiddenLayerCount,
                networkConfiguration.OutputCount);

            // 2^OutputCount distinct binary output codes are available.
            var spectralImagesToTrain = trainingDataProvider.GetSpectralImagesToTrain(
                spectralImagesIndexesToConsider, (int)System.Math.Pow(2, networkConfiguration.OutputCount));
            var trainingSet = trainingDataProvider.MapSpectralImagesToBinaryOutputs(
                spectralImagesToTrain, networkConfiguration.OutputCount);

            normalizeStrategy.NormalizeInputInPlace(networkConfiguration.ActivationFunction, trainingSet.Inputs);
            normalizeStrategy.NormalizeOutputInPlace(networkConfiguration.ActivationFunction, trainingSet.Outputs);
            return(Task.Factory.StartNew(
                       () =>
            {
                var dataset = new BasicNeuralDataSet(trainingSet.Inputs, trainingSet.Outputs);
                var learner = new Backpropagation(network, dataset);
                double correctOutputs = 0.0;
                for (int idynIndex = 0; idynIndex < Idyn; idynIndex++)
                {
                    // Measure, reorder the outputs to the currently best
                    // pairing, then run Edyn training epochs.
                    correctOutputs = networkPerformanceMeter.MeasurePerformance(
                        network, dataset, networkConfiguration.ActivationFunction);
                    callback(TrainingStatus.OutputReordering, correctOutputs, learner.Error, idynIndex * Edyn);
                    var bestPairs = GetBestPairsForReordering(
                        (int)System.Math.Pow(2, networkConfiguration.OutputCount), network, spectralImagesToTrain, trainingSet);
                    ReorderOutputsAccordingToBestPairs(bestPairs, trainingSet, dataset);

                    for (int edynIndex = 0; edynIndex < Edyn; edynIndex++)
                    {
                        correctOutputs = networkPerformanceMeter.MeasurePerformance(
                            network, dataset, networkConfiguration.ActivationFunction);
                        callback(
                            TrainingStatus.RunningDynamicEpoch,
                            correctOutputs,
                            learner.Error,
                            (idynIndex * Edyn) + edynIndex);
                        learner.Iteration();
                    }
                }

                // Final phase: Efixed epochs without output reordering.
                for (int efixedIndex = 0; efixedIndex < Efixed; efixedIndex++)
                {
                    correctOutputs = networkPerformanceMeter.MeasurePerformance(
                        network, dataset, networkConfiguration.ActivationFunction);
                    callback(
                        TrainingStatus.FixedTraining, correctOutputs, learner.Error, (Idyn * Edyn) + efixedIndex);
                    learner.Iteration();
                }

                network.ComputeMedianResponses(trainingSet.Inputs, TrainingSongSnippets);
                callback(TrainingStatus.Finished, correctOutputs, learner.Error, (Idyn * Edyn) + Efixed);
                return network;
            }));
        }
Beispiel #25
0
        /// <summary>
        /// Time-series demo: trains an Elman recurrent network to predict
        /// sin(x) from a sliding window of the previous WINDOW_SIZE degree
        /// samples, then prints actual vs. ideal for every training pair.
        /// </summary>
        static void Main(string[] args)
        {
            // used for prediction of time series
            // sin(x) in theory
            int DEGREES     = 360;
            int WINDOW_SIZE = 16;

            double[][] Input = new double[DEGREES][];
            double[][] Ideal = new double[DEGREES][];

            // Create array of sin signals
            // Row i: sin of degrees i..i+WINDOW_SIZE-1; ideal is the next value.
            for (int i = 0; i < DEGREES; i++)
            {
                Input[i] = new double[WINDOW_SIZE];
                Ideal[i] = new double[] { Math.Sin(DegreeToRad(i + WINDOW_SIZE)) };
                for (int j = 0; j < WINDOW_SIZE; j++)
                {
                    Input[i][j] = Math.Sin(DegreeToRad(i + j));
                }
            }
            // construct training set
            IMLDataSet trainingSet = new BasicMLDataSet(Input, Ideal);

            // construct an Elman type network
            // simple recurrent network
            ElmanPattern pattern = new ElmanPattern
            {
                InputNeurons       = WINDOW_SIZE,
                ActivationFunction = new ActivationSigmoid(),
                OutputNeurons      = 1
            };

            pattern.AddHiddenLayer(WINDOW_SIZE);
            IMLMethod    method  = pattern.Generate();
            BasicNetwork network = (BasicNetwork)method;
            // Train network with greedy backpropagation until the stop
            // strategy detects no further improvement.
            IMLTrain train = new Backpropagation(network, trainingSet);
            var      stop  = new StopTrainingStrategy();

            train.AddStrategy(new Greedy());
            train.AddStrategy(stop);
            int epoch = 0;

            while (!stop.ShouldStop())
            {
                train.Iteration();
                Console.WriteLine($"Training Epoch #{epoch} Error:{train.Error}");
                epoch++;
            }
            // Test network
            foreach (IMLDataPair pair in trainingSet)
            {
                IMLData output = network.Compute(pair.Input);
                Console.WriteLine($"actual={output[0]}, ideal={pair.Ideal[0]}");
            }
        }
Beispiel #26
0
        /// <summary>
        /// Sanity test: backpropagation (lr 0.7, momentum 0.9) should train
        /// the untrained XOR network down to an error of 0.01.
        /// </summary>
        public void TestBPROP()
        {
            IMLDataSet trainingData = new BasicMLDataSet(XOR.XORInput, XOR.XORIdeal);

            BasicNetwork network = NetworkUtil.CreateXORNetworkUntrained();

            IMLTrain bprop = new Backpropagation(network, trainingData, 0.7, 0.9);

            NetworkUtil.TestTraining(bprop, 0.01);
        }
        /// <summary>
        /// Reads the network dimensions and the pattern matrix from the data
        /// file at _path and builds the backpropagation perceptron from them.
        /// </summary>
        public void InitMultilayerPerceptron()
        {
            int inputCount   = _dataFile.GetNInputs(_path);
            int outputCount  = _dataFile.GetNOuputs(_path);
            int patternCount = _dataFile.GetNPatterns(_path);

            double[,] patterns = _dataFile.ReadFileToMatriz(_path);

            _backpropagation = new Backpropagation(inputCount, outputCount, patternCount, patterns);
        }
Beispiel #28
0
        /// <summary>
        /// Backward pass of an Einstein summation "x,y->z": each operand's
        /// gradient is another Einstein sum whose index string is rotated so
        /// that delta and the remaining operand produce that operand's shape.
        /// </summary>
        public override void Backward(Tensor <T> delta, Backpropagation bp)
        {
            var xyz = EinsteinSplit(einsteinString);
            // Gradient w.r.t. x: combine delta with y using "z,y->x".
            var zyx = $"{xyz.Item3},{xyz.Item2}->{xyz.Item1}";

            bp.PushGradientTo(x, new EinsteinSum <T>(delta, y, zyx));

            // Gradient w.r.t. y: combine x with delta using "x,z->y".
            var xzy = $"{xyz.Item1},{xyz.Item3}->{xyz.Item2}";

            bp.PushGradientTo(y, new EinsteinSum <T>(x, delta, xzy));
        }
Beispiel #29
0
        /// <summary>
        /// Backward pass of a concatenation along _axis: slices the incoming
        /// gradient back apart and pushes each piece to its source input.
        /// </summary>
        public override void Backward(Tensor <T> delta, Backpropagation bp)
        {
            delta.AssertOfShape(Shape);
            // Full-range slice on every dimension; only the _axis entry is
            // replaced per input below.
            var slices = Enumerable.Repeat(_, NDim).ToArray();

            for (int i = 0; i < _inputs.Length; ++i)
            {
                slices[_axis] = _slices[i];
                // slices is copied before being passed to the indexer
                bp.PushGradientTo(_inputs[i], delta[slices.ToArray()]);
            }
        }
Beispiel #30
0
 /// <summary>
 /// Prints a colour-coded summary (epoch count and error) of a finished
 /// backpropagation run to the console.
 /// </summary>
 static void PrintBackpropHeader(Backpropagation BackpropTrainer, double BackpropError)
 {
     Console.ForegroundColor = ConsoleColor.White;
     Console.WriteLine("Backpropagation Results: ");

     Console.ForegroundColor = ConsoleColor.Green;
     Console.Write("Epoch Count: ");
     Console.ForegroundColor = ConsoleColor.Yellow;
     Console.WriteLine(BackpropTrainer.EpochCount);

     Console.ForegroundColor = ConsoleColor.Green;
     Console.Write("Error: ");
     Console.ForegroundColor = ConsoleColor.Red;
     Console.WriteLine(BackpropError.ToString("0.000000000"));
 }
Beispiel #31
0
 /// <summary>
 /// Initializes bias values of activation neurons in the activation layer
 /// with independent random values in [minLimit, maxLimit].
 /// </summary>
 /// <param name="activationLayer">
 /// The activation layer to initialize
 /// </param>
 /// <exception cref="ArgumentNullException">
 /// If <c>activationLayer</c> is <c>null</c>
 /// </exception>
 public void Initialize(Backpropagation.ActivationLayer activationLayer)
 {
     // FIX: report the actual parameter name ("activationLayer", not "layer")
     // so the validation message matches the documented contract and the
     // sibling initializers.
     Helper.ValidateNotNull(activationLayer, "activationLayer");
     foreach (Backpropagation.ActivationNeuron neuron in activationLayer.Neurons)
     {
         neuron.bias = Helper.GetRandom(minLimit, maxLimit);
     }
 }
        /// <summary>
        /// Initializes bias values of activation neurons in the activation layer.
        /// </summary>
        /// <param name="activationLayer">
        /// The activation layer to initialize
        /// </param>
        /// <exception cref="ArgumentNullException">
        /// If <c>activationLayer</c> is <c>null</c>
        /// </exception>
        public void Initialize(Backpropagation.ActivationLayer activationLayer)
        {
            Helper.ValidateNotNull(activationLayer, "activationLayer");

            // Total number of neurons this layer feeds, summed over all
            // outgoing connectors.
            int hiddenNeuronCount = 0;
            foreach (IConnector targetConnector in activationLayer.TargetConnectors)
            {
                    hiddenNeuronCount += targetConnector.TargetLayer.NeuronCount;
            }

            // Nguyen-Widrow style initialization: biases are drawn uniformly
            // from [-factor, +factor], where the factor depends on this
            // layer's size and the downstream neuron count.
            double nGuyenWidrowFactor = NGuyenWidrowFactor(activationLayer.NeuronCount, hiddenNeuronCount);

            foreach (Backpropagation.ActivationNeuron neuron in activationLayer.Neurons)
            {
                neuron.bias = Helper.GetRandom(-nGuyenWidrowFactor, nGuyenWidrowFactor);
            }
        }
Beispiel #33
0
 /// <summary>
 /// Initializes bias values of activation neurons in the activation layer,
 /// setting every bias to the configured constant.
 /// </summary>
 /// <param name="activationLayer">
 /// The activation layer to initialize
 /// </param>
 /// <exception cref="ArgumentNullException">
 /// If <c>activationLayer</c> is <c>null</c>
 /// </exception>
 public void Initialize(Backpropagation.ActivationLayer activationLayer)
 {
     // FIX: report the actual parameter name ("activationLayer", not "layer")
     // so the validation message matches the documented contract and the
     // sibling initializers.
     Helper.ValidateNotNull(activationLayer, "activationLayer");
     foreach (ActivationNeuron neuron in activationLayer.Neurons)
     {
         neuron.bias = constant;
     }
 }