public void TestTrain()
        {
            // Inputs: small values belong to class 1, large values to class 0.
            double[][] x =
            {
                new[] {1.0},
                new[] {3.0},
                new[] {2.0},
                new[] {200.0},
                new[] {230.0}
            };

            // Ideal (expected) output for each input row.
            double[][] y =
            {
                new[] {1.0},
                new[] {1.0},
                new[] {1.0},
                new[] {0.0},
                new[] {0.0}
            };

            var trainingData = BasicData.ConvertArrays(x, y);

            // Logistic regression: a linear model combined with a logit link.
            var regression = new MultipleLinearRegression(1)
            {
                LinkFunction = new LogitLinkFunction()
            };
            var train = new TrainReweightLeastSquares(regression, trainingData);
            train.Iteration();

            // After one reweighted-least-squares iteration, the prediction at
            // x = 0 should match this known value.
            double[] query = { 0 };
            double[] prediction = regression.ComputeRegression(query);
            Assert.AreEqual(0.883301730269988, prediction[0], AIFH.DefaultPrecision);
        }
        // Exemplo n.º 2
        // 0
        public void TestTrain()
        {
            // Inputs are exactly collinear multiples of (5, 10, 2), and the
            // ideals are an exact linear function of them, so a perfect fit
            // (R² = 1, error = 0) is expected.
            double[][] x =
            {
                new[] {5.0, 10.0, 2.0},
                new[] {10.0, 20.0, 4.0},
                new[] {15.0, 30.0, 6.0},
                new[] {20.0, 40.0, 8.0},
                new[] {25.0, 50.0, 10.0}
            };

            double[][] y =
            {
                new[] {70.0},
                new[] {132.0},
                new[] {194.0},
                new[] {256.0},
                new[] {318.0}
            };

            var trainingData = BasicData.ConvertArrays(x, y);
            var regression = new MultipleLinearRegression(3);
            var train = new TrainLeastSquares(regression, trainingData);
            train.Iteration();

            // Fitted coefficients (intercept followed by slopes).
            Assert.AreEqual(8, regression.LongTermMemory[0], 0.0001);
            Assert.AreEqual(-54.8, regression.LongTermMemory[1], 0.0001);
            Assert.AreEqual(8, regression.LongTermMemory[2], 0.0001);

            // A perfect fit: R² of one and zero residual error.
            Assert.AreEqual(1, train.R2, 0.0001);
            Assert.AreEqual(0, train.Error, AIFH.DefaultPrecision);

            // Every training row should be reproduced exactly by the model.
            for (int row = 0; row < x.Length; row++)
            {
                double[] predicted = regression.ComputeRegression(x[row]);
                Assert.AreEqual(y[row][0], predicted[0], 0.0001);
            }
        }
        // Exemplo n.º 3
        // 0
        /// <summary>
        ///     Train.  Single iteration.  Solves the ordinary least squares
        ///     problem via QR decomposition, copies the fitted coefficients
        ///     into the model, and records SST, SSE and the overall error.
        /// </summary>
        public void Iteration()
        {
            int rowCount      = _trainingData.Count;
            int inputColCount = _trainingData[0].Input.Length;

            // Design matrix with a leading column of 1s for the intercept.
            Matrix <double> xMatrix = new DenseMatrix(rowCount, inputColCount + 1);
            Matrix <double> yMatrix = new DenseMatrix(rowCount, 1);

            for (int row = 0; row < rowCount; row++)
            {
                BasicData dataRow = _trainingData[row];

                xMatrix[row, 0] = 1;
                for (int col = 0; col < dataRow.Input.Length; col++)
                {
                    xMatrix[row, col + 1] = dataRow.Input[col];
                }
                yMatrix[row, 0] = dataRow.Ideal[0];
            }

            // Calculate the least squares solution
            QR qr = xMatrix.QR();
            Matrix <double> beta = qr.Solve(yMatrix);

            // Mean of the observed outputs.
            // BUG FIX: yMatrix has rowCount rows, but the original summed (and
            // divided by) inputColCount, so the mean and SST only covered the
            // first inputColCount rows of y.
            double sum = 0.0;

            for (int i = 0; i < rowCount; i++)
            {
                sum += yMatrix[i, 0];
            }
            double mean = sum / rowCount;

            // Total sum of squares; reset first so repeated calls to
            // Iteration() do not accumulate stale values.
            _sst = 0.0;
            for (int i = 0; i < rowCount; i++)
            {
                double dev = yMatrix[i, 0] - mean;
                _sst += dev * dev;
            }

            // Residual sum of squares: squared L2 norm of (X*beta - y).
            Matrix <double> residuals = xMatrix.Multiply(beta).Subtract(yMatrix);

            _sse = residuals.L2Norm() * residuals.L2Norm();

            // Copy the fitted coefficients into the model's long-term memory.
            for (int i = 0; i < _algorithm.LongTermMemory.Length; i++)
            {
                _algorithm.LongTermMemory[i] = beta[i, 0];
            }

            // calculate error
            _errorCalculation.Clear();
            foreach (BasicData dataRow in _trainingData)
            {
                double[] output = _algorithm.ComputeRegression(dataRow.Input);
                _errorCalculation.UpdateError(output, dataRow.Ideal, 1.0);
            }
            _error = _errorCalculation.Calculate();
        }
        public void TestBasic()
        {
            var reg = new MultipleLinearRegression(1);

            // One input plus an intercept gives two coefficients.
            Assert.AreEqual(2, reg.LongTermMemory.Length);

            // The link function property should round-trip the instance we set.
            var link = new LogLinkFunction();
            reg.LinkFunction = link;
            Assert.IsTrue(ReferenceEquals(reg.LinkFunction, link));

            // Coefficients 1 (intercept) and 2 (slope): at x = 1 the linear
            // part is 3, and ln(3) = 1.0986122886681098.
            reg.LongTermMemory[0] = 1;
            reg.LongTermMemory[1] = 2;

            double[] input = { 1.0 };
            double[] output = reg.ComputeRegression(input);

            Assert.AreEqual(1, output.Length);
            Assert.AreEqual(1.0986122886681098, output[0], AIFH.DefaultPrecision);
        }
        /// <summary>
        ///     Perform one iteration of training (iteratively reweighted least
        ///     squares).  Builds the gradient and Hessian from the current
        ///     predictions, solves for the Newton step with an LU
        ///     decomposition, applies the step to the coefficients, and
        ///     reports the largest relative coefficient change as the error.
        /// </summary>
        public void Iteration()
        {
            int rowCount   = _trainingData.Count;
            int coeffCount = _algorithm.LongTermMemory.Length;

            var working = new double[rowCount, coeffCount];
            var errors  = new double[rowCount];
            var weights = new double[rowCount];

            // Design matrix: leading 1 column for the intercept, then inputs.
            for (int i = 0; i < rowCount; i++)
            {
                BasicData element = _trainingData[i];

                working[i, 0] = 1;
                for (int j = 0; j < element.Input.Length; j++)
                {
                    working[i, j + 1] = element.Input[j];
                }
            }

            // Residuals and IRLS weights y*(1-y) from the current model.
            for (int i = 0; i < rowCount; i++)
            {
                BasicData element = _trainingData[i];
                double    y       = _algorithm.ComputeRegression(element.Input)[0];
                errors[i]  = y - element.Ideal[0];
                weights[i] = y * (1.0 - y);
            }

            // Zero the accumulators before rebuilding them.
            for (int i = 0; i < coeffCount; i++)
            {
                _gradient[i, 0] = 0;
                for (int j = 0; j < coeffCount; j++)
                {
                    _hessian[i, j] = 0;
                }
            }

            // Gradient: X^T * e.
            for (int j = 0; j < rowCount; j++)
            {
                for (int i = 0; i < coeffCount; i++)
                {
                    _gradient[i, 0] += working[j, i] * errors[j];
                }
            }

            // Hessian: X^T * W * X.
            for (int k = 0; k < weights.Length; k++)
            {
                for (int j = 0; j < coeffCount; j++)
                {
                    for (int i = 0; i < coeffCount; i++)
                    {
                        _hessian[j, i] += working[k, i] * working[k, j] * weights[k];
                    }
                }
            }

            // Solve H * delta = g for the Newton step.
            LU <double> lu = _hessian.LU();

            Matrix <double> deltas = lu.Solve(_gradient);

            var prev = (double[])_algorithm.LongTermMemory.Clone();

            for (int i = 0; i < _algorithm.LongTermMemory.Length; i++)
            {
                _algorithm.LongTermMemory[i] -= deltas[i, 0];
            }

            // Largest relative change in any coefficient — the stopping metric.
            // BUG FIX: deltas is a (coeffCount x 1) column vector, so iterate
            // over RowCount; the original looped to ColumnCount (always 1) and
            // only ever examined the first coefficient.
            double max = 0;

            for (int i = 0; i < deltas.RowCount; i++)
            {
                // NOTE(review): if prev[i] is 0 (e.g. zero-initialized memory
                // on the first call) this divides by zero and yields
                // Infinity/NaN — confirm whether a guard is wanted.
                max = Math.Max(Math.Abs(deltas[i, 0]) / Math.Abs(prev[i]), max);
            }

            _error = max;
        }