public void MulticlassTest1()
        {
            Accord.Math.Tools.SetupGenerator(0);
            // Neuron.RandGenerator = new ThreadSafeRandom(0);


            int numberOfInputs = 3;
            int numberOfClasses = 4;
            int hiddenNeurons = 5;

            double[][] input = 
            {
                new double[] { -1, -1, -1 }, // 0
                new double[] { -1,  1, -1 }, // 1
                new double[] {  1, -1, -1 }, // 1
                new double[] {  1,  1, -1 }, // 0
                new double[] { -1, -1,  1 }, // 2
                new double[] { -1,  1,  1 }, // 3
                new double[] {  1, -1,  1 }, // 3
                new double[] {  1,  1,  1 }  // 2
            };

            int[] labels =
            {
                0,
                1,
                1,
                0,
                2,
                3,
                3,
                2,
            };

            double[][] outputs = Accord.Statistics.Tools
                .Expand(labels, numberOfClasses, -1, 1);

            var function = new BipolarSigmoidFunction(2);
            var network = new ActivationNetwork(function,
                numberOfInputs, hiddenNeurons, numberOfClasses);

            new NguyenWidrow(network).Randomize();

            var teacher = new LevenbergMarquardtLearning(network);

            double error = Double.PositiveInfinity;
            for (int i = 0; i < 10; i++)
                error = teacher.RunEpoch(input, outputs);

            for (int i = 0; i < input.Length; i++)
            {
                int answer;
                double[] output = network.Compute(input[i]);
                double response = output.Max(out answer);

                int expected = labels[i];
                Assert.AreEqual(expected, answer);
            }
        }
Example #2
        /// <summary>
        ///   Constructs a new Gaussian Weight initialization.
        /// </summary>
        /// 
        /// <param name="network">The activation network whose weights will be initialized.</param>
        /// <param name="stdDev">The standard deviation to be used. Common values lie in the 0.001-
        /// 0.1 range. Default is 0.1.</param>
        /// 
        public GaussianWeights(ActivationNetwork network, double stdDev = 0.1)
        {
            this.network = network;

            this.random = new NormalDistribution(0, stdDev);

            this.UpdateThresholds = false;
        }
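
A minimal usage sketch (not part of the original listing; it assumes GaussianWeights exposes a Randomize() method analogous to the NguyenWidrow.Randomize() call used in the tests above):

        public void GaussianWeightsUsageSketch()
        {
            // build a small 2-5-1 network and draw its initial weights from N(0, 0.1)
            var network = new ActivationNetwork(new BipolarSigmoidFunction(2), 2, 5, 1);

            var initializer = new GaussianWeights(network, stdDev: 0.1);
            initializer.UpdateThresholds = true; // presumably also randomizes the thresholds (default is false, per the constructor above)
            initializer.Randomize();
        }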
Example #3
        /// <summary>
        ///   Constructs a new Nguyen-Widrow Weight initialization.
        /// </summary>
        ///
        /// <param name="network">The activation network whose weights will be initialized.</param>
        ///
        public NguyenWidrow(ActivationNetwork network)
        {
            this.network = network;

            int hiddenNodes = network.Layers[0].Neurons.Length;
            int inputNodes  = network.Layers[0].InputsCount;

            randRange = new Range(-0.5f, 0.5f);
            beta      = 0.7 * Math.Pow(hiddenNodes, 1.0 / inputNodes);
        }
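
As a worked example of the scale factor (an illustration, not library code), consider the 3-input, 5-hidden-neuron network built in MulticlassTest1 above:

        // hiddenNodes = 5, inputNodes = 3
        // beta = 0.7 * Math.Pow(5, 1.0 / 3) ≈ 0.7 * 1.71 ≈ 1.20
        //
        // Randomize() uses beta (together with randRange) to scale the initial
        // hidden-layer weights, via the same call already shown above:
        // new NguyenWidrow(network).Randomize();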
Example #4
        public void RunEpochTest1()
        {
            Accord.Math.Tools.SetupGenerator(0);

            double[][] input = 
            {
                new double[] { -1, -1 },
                new double[] { -1,  1 },
                new double[] {  1, -1 },
                new double[] {  1,  1 }
            };

            double[][] output =
            {
                new double[] { -1 },
                new double[] {  1 },
                new double[] {  1 },
                new double[] { -1 }
            };

            //Neuron.RandGenerator = new ThreadSafeRandom(0);
            ActivationNetwork network = new ActivationNetwork(
                   new BipolarSigmoidFunction(2), 2, 2, 1);

            var teacher = new ParallelResilientBackpropagationLearning(network);

            double error = 1.0;
            while (error > 1e-5)
                error = teacher.RunEpoch(input, output);

            for (int i = 0; i < input.Length; i++)
            {
                double actual = network.Compute(input[i])[0];
                double expected = output[i][0];

                Assert.AreEqual(expected, actual, 0.01);
                Assert.IsFalse(Double.IsNaN(actual));
            }
        }
        public void ZeroLambdaTest()
        {
            Accord.Math.Random.Generator.Seed = 0;
            double[,] data = null;

            // open selected file
            using (TextReader stream = new StringReader(Properties.Resources.ZeroLambda))
            using (CsvReader reader = new CsvReader(stream, false))
            {
                data = reader.ToTable().ToMatrix(System.Globalization.CultureInfo.InvariantCulture);
            }

            // number of learning samples
            int samples = data.GetLength(0);

            var ranges = data.GetRange(dimension: 0);

            Assert.AreEqual(2, ranges.Length);

            var rangeX = ranges[0];
            var rangeY = ranges[1];

            // data transformation factor
            double yFactor = 1.7 / rangeY.Length;
            double yMin = rangeY.Min;
            double xFactor = 2.0 / rangeX.Length;
            double xMin = rangeX.Min;
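            // These factors map the raw data into the bipolar sigmoid's useful range:
            // (x - xMin) * xFactor runs over [0, 2], so subtracting 1.0 below yields
            // inputs in [-1, 1]; (y - yMin) * yFactor runs over [0, 1.7], so subtracting
            // 0.85 yields targets in [-0.85, 0.85].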

            // prepare learning data
            double[][] input = new double[samples][];
            double[][] output = new double[samples][];

            for (int i = 0; i < samples; i++)
            {
                input[i] = new double[1];
                output[i] = new double[1];

                input[i][0] = (data[i, 0] - xMin) * xFactor - 1.0; // set input
                output[i][0] = (data[i, 1] - yMin) * yFactor - 0.85; // set output
            }

            // Neuron.RandGenerator = new ThreadSafeRandom(0);
            Accord.Math.Random.Generator.Seed = 0;

            // create multi-layer neural network
            var network = new ActivationNetwork(
                new BipolarSigmoidFunction(5),
                1, 12, 1);

            // create teacher
            var teacher = new LevenbergMarquardtLearning(network, true);

            teacher.LearningRate = 1;

            // iterations
            int iteration = 1;
            int iterations = 2000;

            // solution array
            double[,] solution = new double[samples, 2];
            double[] networkInput = new double[1];

            bool needToStop = false;

            double learningError = 0;

            // loop
            while (!needToStop)
            {
                Assert.AreNotEqual(0, teacher.LearningRate);

                // run epoch of learning procedure
                double error = teacher.RunEpoch(input, output) / samples;

                // calculate solution
                for (int j = 0; j < samples; j++)
                {
                    networkInput[0] = (solution[j, 0] - xMin) * xFactor - 1.0;
                    solution[j, 1] = (network.Compute(networkInput)[0] + 0.85) / yFactor + yMin;
                }


                // calculate error
                learningError = 0.0;
                for (int j = 0; j < samples; j++)
                {
                    networkInput[0] = input[j][0];
                    learningError += Math.Abs(data[j, 1] - ((network.Compute(networkInput)[0] + 0.85) / yFactor + yMin));
                }

                // increase current iteration
                iteration++;

                // check if we need to stop
                if ((iterations != 0) && (iteration > iterations))
                    break;
            }

            Assert.IsTrue(learningError < 0.13);
        }
Example #7
        private static double computeError(double[][] inputs, double[][] outputs, ActivationNetwork ann)
        {
            // Compute the machine outputs
            int miss = 0;
            for (int i = 0; i < inputs.Length; i++)
            {
                var y = System.Math.Sign(ann.Compute(inputs[i])[0]);
                var o = outputs[i][0];
                if (y != o) miss++;
            }

            return (double)miss / inputs.Length;
        }
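
A hedged usage note: inside a worker thread that holds the trained ann and the jagged inputs/outputs arrays (as in the examples below), the helper returns the fraction of misclassified samples, so it can be tracked per epoch:

        // double missRate = computeError(inputs, outputs, ann); // in [0, 1]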
Example #8
        // Worker thread
        void SearchSolution()
        {
            // number of learning samples
            int samples = sourceMatrix.GetLength(0);

            // prepare learning data
            double[][] inputs = sourceMatrix.Submatrix(null, 0, 1).ToArray();
            double[][] outputs = sourceMatrix.GetColumn(2).Transpose().ToArray();

            // create multi-layer neural network
            ann = new ActivationNetwork(
                new BipolarSigmoidFunction(sigmoidAlphaValue),
                2, neuronsInFirstLayer, 1);

            if (useNguyenWidrow)
            {
                if (useSameWeights)
                    Accord.Math.Random.Generator.Seed = 0;

                NguyenWidrow initializer = new NguyenWidrow(ann);
                initializer.Randomize();
            }

            // create teacher
            LevenbergMarquardtLearning teacher = new LevenbergMarquardtLearning(ann, useRegularization);

            // set learning rate and momentum
            teacher.LearningRate = learningRate;

            // iterations
            iteration = 1;

            var ranges = sourceMatrix.GetRange(0);
            double[][] map = Matrix.Mesh(ranges[0], ranges[1], 0.05, 0.05);
            var sw = Stopwatch.StartNew();

            // loop
            while (!needToStop)
            {
                // run epoch of learning procedure
                error = teacher.RunEpoch(inputs, outputs) / samples;

                var result = map.Apply(ann.Compute).GetColumn(0).Apply(Math.Sign);

                var graph = map.ToMatrix().InsertColumn(result.ToDouble());

                this.Invoke((Action)(() =>
                {
                    zedGraphControl2.DataSource = graph;
                }));

                // increase current iteration
                iteration++;

                elapsed = sw.Elapsed;

                updateStatus();

                // check if we need to stop
                if ((iterations != 0) && (iteration > iterations))
                    break;
            }

            sw.Stop();

            // enable settings controls
            EnableControls(true);
        }
Example #9
        // Worker thread
        void SearchSolution()
        {
            // prepare learning data
            double[][] input = new double[samples][];
            double[][] output = new double[samples][];

            for (int i = 0; i < samples; i++)
            {
                input[i] = new double[variables];
                output[i] = new double[1];

                // copy input
                for (int j = 0; j < variables; j++)
                    input[i][j] = data[i, j];
                // copy output
                output[i][0] = classes[i];
            }

            // create perceptron
            ActivationNetwork network = new ActivationNetwork(new ThresholdFunction(), variables, 1);
            ActivationNeuron neuron = network.Layers[0].Neurons[0] as ActivationNeuron;
            // create teacher
            PerceptronLearning teacher = new PerceptronLearning(network);
            // set learning rate
            teacher.LearningRate = learningRate;

            // iterations
            int iteration = 1;

            // statistic files
            StreamWriter errorsFile = null;
            StreamWriter weightsFile = null;

            try
            {
                // check if we need to save statistics to files
                if (saveStatisticsToFiles)
                {
                    // open files
                    errorsFile = File.CreateText("errors.csv");
                    weightsFile = File.CreateText("weights.csv");
                }

                // errors list
                ArrayList errorsList = new ArrayList();

                // loop
                while (!needToStop)
                {
                    // save current weights
                    if (weightsFile != null)
                    {
                        for (int i = 0; i < variables; i++)
                        {
                            weightsFile.Write(neuron.Weights[i] + ",");
                        }
                        weightsFile.WriteLine(neuron.Threshold);
                    }

                    // run epoch of learning procedure
                    double error = teacher.RunEpoch(input, output);
                    errorsList.Add(error);

                    // show current iteration
                    SetText(iterationsBox, iteration.ToString());

                    // save current error
                    if (errorsFile != null)
                    {
                        errorsFile.WriteLine(error);
                    }

                    // show classifier in the case of 2 dimensional data
                    if ((neuron.InputsCount == 2) && (neuron.Weights[1] != 0))
                    {
                        double k = -neuron.Weights[0] / neuron.Weights[1];
                        double b = -neuron.Threshold / neuron.Weights[1];
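                        // The neuron computes w0*x + w1*y + threshold, so the decision
                        // boundary w0*x + w1*y + threshold = 0 rearranges to the line
                        // y = k*x + b with the slope k and intercept b computed above.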

                        double[,] classifier = new double[2, 2]
                        {
                            { chart.RangeX.Min, chart.RangeX.Min * k + b },
                            { chart.RangeX.Max, chart.RangeX.Max * k + b }
                        };
                        // update chart
                        chart.UpdateDataSeries("classifier", classifier);
                    }

                    // stop if no error
                    if (error == 0)
                        break;

                    iteration++;
                }

                // show perceptron's weights
                ListViewItem item = null;

                ClearList(weightsList);
                for (int i = 0; i < variables; i++)
                {
                    item = AddListItem(weightsList, string.Format("Weight {0}", i + 1));
                    AddListSubitem(item, neuron.Weights[i].ToString("F6"));
                }
                item = AddListItem(weightsList, "Threshold");
                AddListSubitem(item, neuron.Threshold.ToString("F6"));

                // show error's dynamics
                double[,] errors = new double[errorsList.Count, 2];

                for (int i = 0, n = errorsList.Count; i < n; i++)
                {
                    errors[i, 0] = i;
                    errors[i, 1] = (double)errorsList[i];
                }

                errorChart.RangeX = new Range(0, errorsList.Count - 1);
                errorChart.RangeY = new Range(0, samples);
                errorChart.UpdateDataSeries("error", errors);
            }
            catch (IOException)
            {
                MessageBox.Show("Failed writing file", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }
            finally
            {
                // close files
                if (errorsFile != null)
                    errorsFile.Close();
                if (weightsFile != null)
                    weightsFile.Close();
            }

            // enable settings controls
            EnableControls(true);
        }
Example #10
        // Worker thread
        void SearchSolution()
        {
            // number of learning samples
            int samples = data.Length - predictionSize - windowSize;
            // data transformation factor
            double factor = 1.7 / chart.RangeY.Length;
            double yMin = chart.RangeY.Min;
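            // As in the other samples, (value - yMin) * factor - 0.85 below maps the
            // series into [-0.85, 0.85], well inside the bipolar sigmoid's output range.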
            // prepare learning data
            double[][] input = new double[samples][];
            double[][] output = new double[samples][];

            for (int i = 0; i < samples; i++)
            {
                input[i] = new double[windowSize];
                output[i] = new double[1];

                // set input
                for (int j = 0; j < windowSize; j++)
                {
                    input[i][j] = (data[i + j] - yMin) * factor - 0.85;
                }
                // set output
                output[i][0] = (data[i + windowSize] - yMin) * factor - 0.85;
            }
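
            // Each training pair is thus a sliding window: sample i takes the rescaled
            // values data[i] .. data[i + windowSize - 1] as input and the next point,
            // data[i + windowSize], as its prediction target.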

            // create multi-layer neural network
            ActivationNetwork network = new ActivationNetwork(
                new BipolarSigmoidFunction(sigmoidAlphaValue),
                windowSize, windowSize * 2, 1);

            // create teacher
            var teacher = new ParallelResilientBackpropagationLearning(network);

            teacher.Reset(initialStep);

            // run at least one backpropagation epoch
            //teacher2.RunEpoch(input, output);

            // iterations
            int iteration = 1;

            // solution array
            int solutionSize = data.Length - windowSize;
            double[,] solution = new double[solutionSize, 2];
            double[] networkInput = new double[windowSize];

            // calculate X values to be used with solution function
            for (int j = 0; j < solutionSize; j++)
            {
                solution[j, 0] = j + windowSize;
            }

            // loop
            while (!needToStop)
            {
                // run epoch of learning procedure
                double error = teacher.RunEpoch(input, output) / samples;

                // calculate solution and learning and prediction errors
                double learningError = 0.0;
                double predictionError = 0.0;
                // go through all the data
                for (int i = 0, n = data.Length - windowSize; i < n; i++)
                {
                    // put values from current window as network's input
                    for (int j = 0; j < windowSize; j++)
                    {
                        networkInput[j] = (data[i + j] - yMin) * factor - 0.85;
                    }

                    // evaluate the function
                    solution[i, 1] = (network.Compute(networkInput)[0] + 0.85) / factor + yMin;

                    // calculate prediction error
                    if (i >= n - predictionSize)
                    {
                        predictionError += Math.Abs(solution[i, 1] - data[windowSize + i]);
                    }
                    else
                    {
                        learningError += Math.Abs(solution[i, 1] - data[windowSize + i]);
                    }
                }
                // update solution on the chart
                chart.UpdateDataSeries("solution", solution);

                // set current iteration's info
                SetText(currentIterationBox, iteration.ToString());
                SetText(currentLearningErrorBox, learningError.ToString("F3"));
                SetText(currentPredictionErrorBox, predictionError.ToString("F3"));

                // increase current iteration
                iteration++;

                // check if we need to stop
                if ((iterations != 0) && (iteration > iterations))
                    break;
            }

            // show new solution
            for (int j = windowSize, k = 0, n = data.Length; j < n; j++, k++)
            {
                AddSubItem(dataList, j, solution[k, 1].ToString());
            }

            // enable settings controls
            EnableControls(true);
        }
Example #11
        // Worker thread
        void SearchSolution()
        {
            // prepare learning data
            double[][] input = new double[samples][];
            double[][] output = new double[samples][];

            for (int i = 0; i < samples; i++)
            {
                input[i] = new double[2];
                output[i] = new double[classesCount];

                // set input
                input[i][0] = data[i, 0];
                input[i][1] = data[i, 1];
                // set output
                output[i][classes[i]] = 1;
            }

            // create perceptron
            ActivationNetwork network = new ActivationNetwork(new ThresholdFunction(), 2, classesCount);
            ActivationLayer layer = network.Layers[0] as ActivationLayer;
            // create teacher
            PerceptronLearning teacher = new PerceptronLearning(network);
            // set learning rate
            teacher.LearningRate = learningRate;

            // iterations
            int iteration = 1;

            // statistic files
            StreamWriter errorsFile = null;
            StreamWriter weightsFile = null;

            try
            {
                // check if we need to save statistics to files
                if (saveStatisticsToFiles)
                {
                    // open files
                    errorsFile = File.CreateText("errors.csv");
                    weightsFile = File.CreateText("weights.csv");
                }

                // errors list
                ArrayList errorsList = new ArrayList();

                // loop
                while (!needToStop)
                {
                    // save current weights
                    if (weightsFile != null)
                    {
                        for (int i = 0; i < classesCount; i++)
                        {
                            weightsFile.Write("neuron" + i + ",");
                            weightsFile.Write(layer.Neurons[i].Weights[0] + ",");
                            weightsFile.Write(layer.Neurons[i].Weights[1] + ",");
                            weightsFile.WriteLine(((ActivationNeuron)layer.Neurons[i]).Threshold);
                        }
                    }

                    // run epoch of learning procedure
                    double error = teacher.RunEpoch(input, output);
                    errorsList.Add(error);

                    // save current error
                    if (errorsFile != null)
                    {
                        errorsFile.WriteLine(error);
                    }

                    // show current iteration
                    SetText(iterationsBox, iteration.ToString());

                    // stop if no error
                    if (error == 0)
                        break;

                    // show classifiers
                    for (int j = 0; j < classesCount; j++)
                    {
                        double k = (layer.Neurons[j].Weights[1] != 0) ? (-layer.Neurons[j].Weights[0] / layer.Neurons[j].Weights[1]) : 0;
                        double b = (layer.Neurons[j].Weights[1] != 0) ? (-((ActivationNeuron)layer.Neurons[j]).Threshold / layer.Neurons[j].Weights[1]) : 0;

                        double[,] classifier = new double[2, 2]
                        {
                            { chart.RangeX.Min, chart.RangeX.Min * k + b },
                            { chart.RangeX.Max, chart.RangeX.Max * k + b }
                        };

                        // update chart
                        chart.UpdateDataSeries("classifier" + j, classifier);
                    }

                    iteration++;
                }

                // show perceptron's weights
                ClearList(weightsList);
                for (int i = 0; i < classesCount; i++)
                {
                    string neuronName = string.Format("Neuron {0}", i + 1);

                    // weight 0
                    ListViewItem item = AddListItem(weightsList, neuronName);
                    AddListSubitem(item, "Weight 1");
                    AddListSubitem(item, layer.Neurons[i].Weights[0].ToString("F6"));
                    // weight 1
                    item = AddListItem(weightsList, neuronName);
                    AddListSubitem(item, "Weight 2");
                    AddListSubitem(item, layer.Neurons[i].Weights[1].ToString("F6"));
                    // threshold
                    item = AddListItem(weightsList, neuronName);
                    AddListSubitem(item, "Threshold");
                    AddListSubitem(item, ((ActivationNeuron)layer.Neurons[i]).Threshold.ToString("F6"));
                }

                // show error's dynamics
                double[,] errors = new double[errorsList.Count, 2];

                for (int i = 0, n = errorsList.Count; i < n; i++)
                {
                    errors[i, 0] = i;
                    errors[i, 1] = (double)errorsList[i];
                }

                errorChart.RangeX = new Range(0, errorsList.Count - 1);
                errorChart.UpdateDataSeries("error", errors);
            }
            catch (IOException)
            {
                MessageBox.Show("Failed writing file", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }
            finally
            {
                // close files
                if (errorsFile != null)
                    errorsFile.Close();
                if (weightsFile != null)
                    weightsFile.Close();
            }

            // enable settings controls
            EnableControls(true);
        }
        public void BlockHessianTest1()
        {
            // Network with no hidden layers: 2-1

            Accord.Math.Tools.SetupGenerator(0);

            double[][] input = 
            {
                new double[] {-1, -1 },
                new double[] {-1,  1 },
                new double[] { 1, -1 },
                new double[] { 1,  1 }
            };

            double[][] output =
            {
                new double[] {-1 },
                new double[] { 1 },
                new double[] { 1 },
                new double[] {-1 }
            };

            ActivationNetwork network = new ActivationNetwork(
                   new BipolarSigmoidFunction(2), 2, 1);

            var teacher1 = new LevenbergMarquardtLearning(network,
                false, JacobianMethod.ByFiniteDifferences);

            var teacher2 = new LevenbergMarquardtLearning(network,
                false, JacobianMethod.ByBackpropagation);
            teacher2.Blocks = 2;

            // Set lambda to lambda max so no iterations are performed
            teacher1.LearningRate = 1e30f;
            teacher2.LearningRate = 1e30f;
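
            // In LevenbergMarquardtLearning, LearningRate plays the role of the damping
            // term lambda; with lambda this large the weight update is effectively zero,
            // so both teachers compute their Hessian and gradient at the same (initial)
            // weights and the two results can be compared entry by entry below.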

            teacher1.RunEpoch(input, output);
            teacher2.RunEpoch(input, output);

            var hessian1 = teacher1.Hessian;
            var hessian2 = teacher2.Hessian;

            for (int i = 0; i < hessian1.Length; i++)
            {
                for (int j = 0; j < hessian1[i].Length; j++)
                {
                    double j1 = hessian1[i][j];
                    double j2 = hessian2[i][j];

                    Assert.AreEqual(j1, j2, 1e-4);

                    Assert.IsFalse(Double.IsNaN(j1));
                    Assert.IsFalse(Double.IsNaN(j2));
                }
            }

            Assert.IsTrue(hessian1.IsUpperTriangular());
            Assert.IsTrue(hessian2.IsUpperTriangular());

            var gradient1 = teacher1.Gradient;
            var gradient2 = teacher2.Gradient;

            for (int i = 0; i < gradient1.Length; i++)
            {
                double j1 = gradient1[i];
                double j2 = gradient2[i];

                Assert.AreEqual(j1, j2, 1e-5);

                Assert.IsFalse(Double.IsNaN(j1));
                Assert.IsFalse(Double.IsNaN(j2));
            }
        }
Example #13
        // Worker thread
        void SearchSolution()
        {
            // number of learning samples
            int samples = data.GetLength(0);

            // prepare learning data
            DoubleRange unit = new DoubleRange(-1, 1);
            double[][] input = data.GetColumn(0).Scale(fromRange: xRange, toRange: unit).ToArray();
            double[][] output = data.GetColumn(1).Scale(fromRange: yRange, toRange: unit).ToArray();


            // create multi-layer neural network
            ActivationNetwork network = new ActivationNetwork(
                new BipolarSigmoidFunction(sigmoidAlphaValue),
                1, neuronsInFirstLayer, 1);

            if (useNguyenWidrow)
            {
                new NguyenWidrow(network).Randomize();
            }

            // create teacher
            var teacher = new ParallelResilientBackpropagationLearning(network);

            // iterations
            int iteration = 1;

            // solution array
            double[,] solution = new double[samples, 2];


            // loop
            while (!needToStop)
            {
                // run epoch of learning procedure
                double error = teacher.RunEpoch(input, output) / samples;

                // calculate solution
                for (int j = 0; j < samples; j++)
                {
                    double x = input[j][0];
                    double y = network.Compute(new[] { x })[0];
                    solution[j, 0] = x.Scale(fromRange: unit, toRange: xRange);
                    solution[j, 1] = y.Scale(fromRange: unit, toRange: yRange);
                }

                chart.UpdateDataSeries("solution", solution);

                // calculate error
                double learningError = 0.0;
                for (int j = 0; j < samples; j++)
                {
                    double x = input[j][0];
                    double expected = data[j, 1];
                    double actual = network.Compute(new[] { x })[0];
                    learningError += Math.Abs(expected - actual);
                }

                // set current iteration's info
                SetText(currentIterationBox, iteration.ToString());
                SetText(currentErrorBox, learningError.ToString("F3"));

                // increase current iteration
                iteration++;

                // check if we need to stop
                if ((iterations != 0) && (iteration > iterations))
                    break;
            }


            // enable settings controls
            EnableControls(true);
        }
        public void JacobianByChainRuleTest_MultipleOutput()
        {
            // Network with no hidden layers: 3-4

            int numberOfInputs = 3;
            int numberOfClasses = 4;

            double[][] input = 
            {
                new double[] { -1, -1, -1 }, // 0
                new double[] { -1,  1, -1 }, // 1
                new double[] {  1, -1, -1 }, // 1
                new double[] {  1,  1, -1 }, // 0
                new double[] { -1, -1,  1 }, // 2
                new double[] { -1,  1,  1 }, // 3
                new double[] {  1, -1,  1 }, // 3
                new double[] {  1,  1,  1 }  // 2
            };

            int[] labels =
            {
                0,
                1,
                1,
                0,
                2,
                3,
                3,
                2,
            };

            double[][] output = Accord.Statistics.Tools
                .Expand(labels, numberOfClasses, -1, 1);

            // Neuron.RandGenerator = new ThreadSafeRandom(0);
            Accord.Math.Random.Generator.Seed = 0;

            ActivationNetwork network = new ActivationNetwork(
                   new BipolarSigmoidFunction(2), numberOfInputs, numberOfClasses);

            var teacher1 = new LevenbergMarquardtLearning(network,
                false, JacobianMethod.ByFiniteDifferences);

            var teacher2 = new LevenbergMarquardtLearning(network,
                false, JacobianMethod.ByBackpropagation);

            // Set lambda to lambda max so no iterations are performed
            teacher1.LearningRate = 1e30f;
            teacher2.LearningRate = 1e30f;

            teacher1.RunEpoch(input, output);
            teacher2.RunEpoch(input, output);

            var jacobian1 = teacher1.Jacobian;
            var jacobian2 = teacher2.Jacobian;


            for (int i = 0; i < jacobian1.Length; i++)
            {
                for (int j = 0; j < jacobian1[i].Length; j++)
                {
                    double j1 = jacobian1[i][j];
                    double j2 = jacobian2[i][j];

                    Assert.AreEqual(j1, j2, 1e-3);

                    Assert.IsFalse(Double.IsNaN(j1));
                    Assert.IsFalse(Double.IsNaN(j2));
                }
            }
        }
        public void JacobianByChainRuleTest4()
        {
            // Network with no hidden layers: 2-1

            double[][] input = 
            {
                new double[] {-1, -1 },
                new double[] {-1,  1 },
                new double[] { 1, -1 },
                new double[] { 1,  1 }
            };

            double[][] output =
            {
                new double[] {-1 },
                new double[] { 1 },
                new double[] { 1 },
                new double[] {-1 }
            };

            // Neuron.RandGenerator = new ThreadSafeRandom(0);
            Accord.Math.Random.Generator.Seed = 0;

            ActivationNetwork network = new ActivationNetwork(
                   new BipolarSigmoidFunction(2), 2, 1);

            var teacher1 = new LevenbergMarquardtLearning(network,
                false, JacobianMethod.ByFiniteDifferences);

            var teacher2 = new LevenbergMarquardtLearning(network,
                false, JacobianMethod.ByBackpropagation);

            // Set lambda to lambda max so no iterations are performed
            teacher1.LearningRate = 1e30f;
            teacher2.LearningRate = 1e30f;

            teacher1.RunEpoch(input, output);
            teacher2.RunEpoch(input, output);

            var jacobian1 = teacher1.Jacobian;
            var jacobian2 = teacher2.Jacobian;


            for (int i = 0; i < jacobian1.Length; i++)
            {
                for (int j = 0; j < jacobian1[i].Length; j++)
                {
                    double j1 = jacobian1[i][j];
                    double j2 = jacobian2[i][j];

                    Assert.AreEqual(j1, j2, 1e-5);

                    Assert.IsFalse(Double.IsNaN(j1));
                    Assert.IsFalse(Double.IsNaN(j2));
                }
            }
        }
        public void RunEpochTest3()
        {
            double[,] dataset = yinyang;

            double[][] input = dataset.GetColumns(new[] { 0, 1 }).ToJagged();
            double[][] output = dataset.GetColumn(2).ToJagged();

            // Neuron.RandGenerator = new ThreadSafeRandom(0);
            Accord.Math.Random.Generator.Seed = 0;

            ActivationNetwork network = new ActivationNetwork(
                   new BipolarSigmoidFunction(2), 2, 5, 1);

            var teacher = new LevenbergMarquardtLearning(network,
                true, JacobianMethod.ByBackpropagation);

            Assert.IsTrue(teacher.UseRegularization);

            double error = 1.0;
            for (int i = 0; i < 500; i++)
                error = teacher.RunEpoch(input, output);

            double[][] actual = new double[output.Length][];

            for (int i = 0; i < input.Length; i++)
                actual[i] = network.Compute(input[i]);

            for (int i = 0; i < input.Length; i++)
                Assert.AreEqual(Math.Sign(output[i][0]), Math.Sign(actual[i][0]));
        }
        public void ConstructorTest()
        {
            // Four training samples of the xor function

            // two inputs (x and y)
            double[][] input = 
            {
                new double[] { -1, -1 },
                new double[] { -1,  1 },
                new double[] {  1, -1 },
                new double[] {  1,  1 }
            };

            // one output (z = x ^ y)
            double[][] output = 
            {
                new double[] { -1 },
                new double[] {  1 },
                new double[] {  1 },
                new double[] { -1 }
            };


            // create multi-layer neural network
            ActivationNetwork network = new ActivationNetwork(
                   new BipolarSigmoidFunction(2), // use a bipolar sigmoid activation function
                   2, // two inputs
                   3, // three hidden neurons
                   1  // one output neuron
                   );

            // create teacher
            LevenbergMarquardtLearning teacher = new LevenbergMarquardtLearning(
                network, // the neural network
                false,   // whether or not to use Bayesian regularization
                JacobianMethod.ByBackpropagation // Jacobian calculation method
                );


            // set learning rate and momentum
            teacher.LearningRate = 0.1f;

            // start the supervised learning
            for (int i = 0; i < 1000; i++)
            {
                double error = teacher.RunEpoch(input, output);
            }

            // If we reached here, the constructor test has passed.
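            // (A hedged extension could also verify the learned XOR mapping the way
            // RunEpochTest1 does, e.g. Assert.AreEqual(output[i][0],
            // network.Compute(input[i])[0], 0.1) for each sample.)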
        }
        public void RunEpochTest4()
        {
            Accord.Math.Tools.SetupGenerator(0);

            double[][] input = 
            {
                new double[] { 0, 0 },
            };

            double[][] output =
            {
                new double[] { 0 },
            };

            // Neuron.RandGenerator = new ThreadSafeRandom(0);
            ActivationNetwork network = new ActivationNetwork(
                   new BipolarSigmoidFunction(2), 2, 1);

            var teacher = new LevenbergMarquardtLearning(network,
                true, JacobianMethod.ByBackpropagation);

            double error = 1.0;
            for (int i = 0; i < 1000; i++)
                error = teacher.RunEpoch(input, output);

            for (int i = 0; i < input.Length; i++)
                Assert.AreEqual(network.Compute(input[i])[0], output[i][0], 0.1);
        }
        public void RunEpochTest1()
        {
            Accord.Math.Tools.SetupGenerator(0);

            double[][] input = 
            {
                new double[] { -1, -1 },
                new double[] { -1,  1 },
                new double[] {  1, -1 },
                new double[] {  1,  1 }
            };

            double[][] output =
            {
                new double[] { -1 },
                new double[] {  1 },
                new double[] {  1 },
                new double[] { -1 }
            };

            // Neuron.RandGenerator = new ThreadSafeRandom(0);

            ActivationNetwork network = new ActivationNetwork(
                   new BipolarSigmoidFunction(2), 2, 2, 1);

            var teacher = new LevenbergMarquardtLearning(network,
                false, JacobianMethod.ByFiniteDifferences);

            double error = 1.0;
            while (error > 1e-5)
                error = teacher.RunEpoch(input, output);

            for (int i = 0; i < input.Length; i++)
                Assert.AreEqual(network.Compute(input[i])[0], output[i][0], 0.1);
        }
Example #20
        // Worker thread
        void SearchSolution()
        {
            // initialize input and output values
            double[][] input = null;
            double[][] output = null;

            if (sigmoidType == 0)
            {
                // unipolar data
                input = new double[4][] {
                                            new double[] {0, 0},
                                            new double[] {0, 1},
                                            new double[] {1, 0},
                                            new double[] {1, 1}
                                        };
                output = new double[4][] {
                                             new double[] {0},
                                             new double[] {1},
                                             new double[] {1},
                                             new double[] {0}
                                         };
            }
            else
            {
                // bipolar data
                input = new double[4][] {
                                            new double[] {-1, -1},
                                            new double[] {-1,  1},
                                            new double[] { 1, -1},
                                            new double[] { 1,  1}
                                        };
                output = new double[4][] {
                                             new double[] {-1},
                                             new double[] { 1},
                                             new double[] { 1},
                                             new double[] {-1}
                                         };
            }

            // create neural network
            ActivationNetwork network = new ActivationNetwork(
                (sigmoidType == 0) ?
                    (IActivationFunction)new SigmoidFunction(sigmoidAlphaValue) :
                    (IActivationFunction)new BipolarSigmoidFunction(sigmoidAlphaValue),
                2, 2, 1);

            // create teacher
            LevenbergMarquardtLearning teacher = new LevenbergMarquardtLearning(network);

            // set learning rate 
            teacher.LearningRate = learningRate;


            // iterations
            int iteration = 0;

            // statistic files
            StreamWriter errorsFile = null;

            try
            {
                // check if we need to save statistics to files
                if (saveStatisticsToFiles)
                {
                    // open files
                    errorsFile = File.CreateText("errors.csv");
                }

                // errors list
                ArrayList errorsList = new ArrayList();

                // loop
                while (!needToStop)
                {
                    // run epoch of learning procedure
                    double error = teacher.RunEpoch(input, output);
                    errorsList.Add(error);

                    // save current error
                    if (errorsFile != null)
                    {
                        errorsFile.WriteLine(error);
                    }

                    // show current iteration & error
                    SetText(currentIterationBox, iteration.ToString());
                    SetText(currentErrorBox, error.ToString());
                    iteration++;

                    // check if we need to stop
                    if (error <= learningErrorLimit)
                        break;
                }

                // show error's dynamics
                double[,] errors = new double[errorsList.Count, 2];

                for (int i = 0, n = errorsList.Count; i < n; i++)
                {
                    errors[i, 0] = i;
                    errors[i, 1] = (double)errorsList[i];
                }

                errorChart.RangeX = new Range(0, errorsList.Count - 1);
                errorChart.UpdateDataSeries("error", errors);
            }
            catch (IOException)
            {
                MessageBox.Show("Failed writing file", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }
            finally
            {
                // close files
                if (errorsFile != null)
                    errorsFile.Close();
            }

            // enable settings controls
            EnableControls(true);
        }
        public void MulticlassTest1()
        {
            Accord.Math.Tools.SetupGenerator(0);

            // Suppose we would like to teach a network to classify
            // the following input vectors into 3 possible classes:
            //
            double[][] inputs =
            {
                new double[] { 0, 1, 1, 0 }, // 0
                new double[] { 0, 1, 0, 0 }, // 0
                new double[] { 0, 0, 1, 0 }, // 0
                new double[] { 0, 1, 1, 0 }, // 0
                new double[] { 0, 1, 0, 0 }, // 0
                new double[] { 1, 0, 0, 0 }, // 1
                new double[] { 1, 0, 0, 0 }, // 1
                new double[] { 1, 0, 0, 1 }, // 1
                new double[] { 0, 0, 0, 1 }, // 1
                new double[] { 0, 0, 0, 1 }, // 1
                new double[] { 1, 1, 1, 1 }, // 2
                new double[] { 1, 0, 1, 1 }, // 2
                new double[] { 1, 1, 0, 1 }, // 2
                new double[] { 0, 1, 1, 1 }, // 2
                new double[] { 1, 1, 1, 1 }, // 2
            };

            int[] classes =
            {
                0, 0, 0, 0, 0,
                1, 1, 1, 1, 1,
                2, 2, 2, 2, 2,
            };

            // First we have to convert this problem into a form that the neural
            // network can handle. The first step is to expand the classes into
            // indicator vectors, where a value of +1 in a given position signals
            // that the sample belongs to the class associated with that position.
            //
            double[][] outputs = Statistics.Tools.Expand(classes, -1, +1);
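
            // With three classes and the (-1, +1) encoding above, class 0 expands to
            // { +1, -1, -1 }, class 1 to { -1, +1, -1 } and class 2 to { -1, -1, +1 }.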

            // Create an activation function for the net
            var function = new BipolarSigmoidFunction();

            // Create an activation network with the function and
            //  4 inputs, 5 hidden neurons and 3 possible outputs:
            var network = new ActivationNetwork(function, 4, 5, 3);

            // Randomly initialize the network
            new NguyenWidrow(network).Randomize();

            // Teach the network using parallel Rprop:
            var teacher = new ParallelResilientBackpropagationLearning(network);

            double error = 1.0;
            while (error > 1e-5)
                error = teacher.RunEpoch(inputs, outputs);


            // Checks if the network has learned
            for (int i = 0; i < inputs.Length; i++)
            {
                double[] answer = network.Compute(inputs[i]);

                int expected = classes[i];
                int actual; answer.Max(out actual);

                Assert.AreEqual(expected, actual, 0.01);
            }
        }
Example #22
        // Worker thread
        void SearchSolution()
        {
            bool reducedNetwork = ((classesCount == 2) && (useOneNeuronForTwoClasses));

            // prepare learning data
            double[][] input = new double[samples][];
            double[][] output = new double[samples][];

            for (int i = 0; i < samples; i++)
            {
                input[i] = new double[variables];
                output[i] = new double[neuronsCount];

                // set input
                for (int j = 0; j < variables; j++)
                    input[i][j] = data[i, j];
                // set output
                if (reducedNetwork)
                {
                    output[i][0] = classes[i];
                }
                else
                {
                    output[i][classes[i]] = 1;
                }
            }

            // create perceptron
            ActivationNetwork network = new ActivationNetwork(
                new SigmoidFunction(sigmoidAlphaValue), variables, neuronsCount);
            ActivationLayer layer = network.Layers[0] as ActivationLayer;
            // create teacher
            DeltaRuleLearning teacher = new DeltaRuleLearning(network);
            // set learning rate
            teacher.LearningRate = learningRate;

            // iterations
            int iteration = 1;

            // statistic files
            StreamWriter errorsFile = null;
            StreamWriter weightsFile = null;

            try
            {
                // check if we need to save statistics to files
                if (saveStatisticsToFiles)
                {
                    // open files
                    errorsFile = File.CreateText("errors.csv");
                    weightsFile = File.CreateText("weights.csv");
                }

                // errors list
                ArrayList errorsList = new ArrayList();

                // loop
                while (!needToStop)
                {
                    // save current weights
                    if (weightsFile != null)
                    {
                        for (int i = 0; i < neuronsCount; i++)
                        {
                            weightsFile.Write("neuron" + i + ",");
                            for (int j = 0; j < variables; j++)
                                weightsFile.Write(layer.Neurons[i].Weights[j] + ",");
                            weightsFile.WriteLine(((ActivationNeuron)layer.Neurons[i]).Threshold);
                        }
                    }

                    // run epoch of learning procedure
                    double error = teacher.RunEpoch(input, output) / samples;
                    errorsList.Add(error);

                    // save current error
                    if (errorsFile != null)
                    {
                        errorsFile.WriteLine(error);
                    }

                    // show current iteration & error
                    SetText(currentIterationBox, iteration.ToString());
                    SetText(currentErrorBox, error.ToString());
                    iteration++;

                    // check if we need to stop
                    if ((useErrorLimit) && (error <= learningErrorLimit))
                        break;
                    if ((!useErrorLimit) && (iterationLimit != 0) && (iteration > iterationLimit))
                        break;
                }

                // show perceptron's weights
                ClearList(weightsList);
                for (int i = 0; i < neuronsCount; i++)
                {
                    string neuronName = string.Format("Neuron {0}", i + 1);
                    ListViewItem item = null;

                    // add all weights
                    for (int j = 0; j < variables; j++)
                    {
                        item = AddListItem(weightsList, neuronName);
                        AddListSubitem(item, string.Format("Weight {0}", j + 1));
                        AddListSubitem(item, layer.Neurons[i].Weights[j].ToString("F6"));
                    }
                    // threshold
                    item = AddListItem(weightsList, neuronName);
                    AddListSubitem(item, "Threshold");
                    AddListSubitem(item, ((ActivationNeuron)layer.Neurons[i]).Threshold.ToString("F6"));
                }

                // show error's dynamics
                double[,] errors = new double[errorsList.Count, 2];

                for (int i = 0, n = errorsList.Count; i < n; i++)
                {
                    errors[i, 0] = i;
                    errors[i, 1] = (double)errorsList[i];
                }

                errorChart.RangeX = new Range(0, errorsList.Count - 1);
                errorChart.UpdateDataSeries("error", errors);
            }
            catch (IOException)
            {
                MessageBox.Show("Failed writing file", "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }
            finally
            {
                // close files
                if (errorsFile != null)
                    errorsFile.Close();
                if (weightsFile != null)
                    weightsFile.Close();
            }

            // enable settings controls
            EnableControls(true);
        }