        [Test]
        public void LinearRegressionWithResidualSumOfSquares()
        {
            // obtain the test data
            var trainingSet = new List<DataPoint<double>>
            {
                new DataPoint<double>(-1, -1.5),
                new DataPoint<double>(0, 0.5),
                new DataPoint<double>(1, 2.5),
                new DataPoint<double>(2, 4.5),
                new DataPoint<double>(3, 6.5)
            };

            // assume a hypothesis
            var hypothesis = new LinearHypothesis(1);
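            // (one input variable, i.e. h(theta, x) = theta[0] + theta[1] * x)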
            var initialCoefficients = Vector<double>.Build.Random(2);

            // cost function is sum of squared errors
            var costFunction = new ResidualSumOfSquaresCostFunction(hypothesis, trainingSet);
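            // (RSS(theta) = sum over the training set of (h(theta, x) - y)^2)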

            // define the optimization problem
            var problem = new OptimizationProblem<double, IDifferentiableCostFunction<double>>(costFunction, initialCoefficients);

            // optimize!
            var gd = new ResilientErrorGD
            {
                ErrorTolerance = 0.0D
            };
            var result = gd.Minimize(problem);

            // assert!
            var coefficients = result.Coefficients;
            coefficients[0].Should().BeApproximately(0.5, 1E-6D, "because that's the underlying system's intercept");
            coefficients[1].Should().BeApproximately(2, 1E-6D, "because that's the underlying system's slope");
        }
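
For cross-checking: the coefficients asserted above also follow from the closed-form least-squares solution for a single input variable, slope = sum((x - meanX) * (y - meanY)) / sum((x - meanX)^2) and intercept = meanY - slope * meanX. A minimal standalone sketch in plain C# (no dependency on the optimization library; the OrdinaryLeastSquares name is illustrative, not part of the code under test):

using System;
using System.Linq;

// Minimal sketch: closed-form least squares for one input variable.
static class OrdinaryLeastSquares
{
    public static (double Intercept, double Slope) Fit(double[] x, double[] y)
    {
        var meanX = x.Average();
        var meanY = y.Average();

        // slope = sum((x - meanX) * (y - meanY)) / sum((x - meanX)^2)
        var slope = x.Zip(y, (xi, yi) => (xi - meanX) * (yi - meanY)).Sum()
                  / x.Sum(xi => (xi - meanX) * (xi - meanX));
        var intercept = meanY - slope * meanX;
        return (intercept, slope);
    }

    public static void Main()
    {
        // the same training data as in the test above
        var x = new[] { -1D, 0D, 1D, 2D, 3D };
        var y = new[] { -1.5D, 0.5D, 2.5D, 4.5D, 6.5D };

        var (intercept, slope) = Fit(x, y);
        Console.WriteLine($"intercept = {intercept}, slope = {slope}"); // 0.5, 2
    }
}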
        [Test]
        public void TrivialLinearHypothesisIsIdentity([Random(5)] double value)
        {
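            // with theta = [0, 1], the hypothesis h(x) = 0 + 1 * x is the identity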
            var h = new LinearHypothesis(1);
            var theta = Vector<double>.Build.Dense(new[] {0D, 1D});
            var inputs = Vector<double>.Build.Dense(1, value);

            var outputs = h.Evaluate(theta, inputs);
            outputs.Count.Should().Be(1, "because one output is expected");
            outputs.Single().Should().BeApproximately(value, 1E-5D, "because the function is linear");

            var derivative = h.CoefficientJacobian(theta, inputs, outputs);
            derivative.Count.Should().Be(2, "because two coefficients are evaluated");
            // ReSharper disable once CompareOfFloatsByEqualityOperator
            derivative[0].Should().Be(1D, "because the offset is independent of the input");
            derivative[1].Should().Be(value, "because the coefficient's derivative is the input");
        }
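
The Jacobian assertions above come from differentiating h(theta, x) = theta0 + theta1 * x with respect to the coefficients: dh/dtheta0 = 1 and dh/dtheta1 = x. A standalone sketch that verifies this against central finite differences (plain C#; the names are illustrative, not part of the library):

using System;

static class LinearHypothesisSketch
{
    // h(theta, x) = theta[0] + theta[1] * x
    static double Evaluate(double[] theta, double x) => theta[0] + theta[1] * x;

    public static void Main()
    {
        var theta = new[] { 0D, 1D };
        const double x = 3.7D;
        const double eps = 1E-6D;

        // analytic coefficient gradient: [dh/dtheta0, dh/dtheta1] = [1, x]
        var analytic = new[] { 1D, x };

        // central finite difference over each coefficient
        for (var i = 0; i < theta.Length; ++i)
        {
            var plus = (double[])theta.Clone();
            var minus = (double[])theta.Clone();
            plus[i] += eps;
            minus[i] -= eps;

            var numeric = (Evaluate(plus, x) - Evaluate(minus, x)) / (2 * eps);
            Console.WriteLine($"dh/dtheta{i}: analytic={analytic[i]}, numeric={numeric}");
        }
    }
}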