Example #1
        /// <summary>
        /// Learns a model that can map the given inputs to the given outputs.
        /// </summary>
        /// <param name="x">The model inputs.</param>
        /// <param name="y">The desired outputs associated with each <paramref name="x">inputs</paramref>.</param>
        /// <param name="weights">The weight of importance for each input-output pair.</param>
        /// <returns>
        /// A model that has learned how to produce <paramref name="y" /> given <paramref name="x" />.
        /// </returns>
        public MultipleLinearRegression Learn(double[][] x, double[] y, double[] weights = null)
        {
            if (UseIntercept)
            {
                x = x.InsertColumn(value: 1.0);
            }

            decomposition = x.Decompose(leastSquares: IsRobust);
            double[] coefficients = decomposition.Solve(y);

            if (UseIntercept)
            {
                return new MultipleLinearRegression()
                {
                    Weights = coefficients.Get(0, -1),
                    Intercept = coefficients.Get(-1)
                };
            }
            else
            {
                return new MultipleLinearRegression()
                {
                    Weights = coefficients
                };
            }
        }
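
A minimal usage sketch for the overload above, under the assumption that it lives on a least-squares learner such as Accord.NET's OrdinaryLeastSquares (the class that would expose UseIntercept, IsRobust and the decomposition field); the data is illustrative, and note that this particular overload never reads its weights argument:

        // Hedged sketch (assumes: using Accord.Statistics.Models.Regression.Linear;
        // and that OrdinaryLeastSquares is the class hosting the Learn overload above).
        double[][] inputs =
        {
            new double[] { 1.0, 1.0 },
            new double[] { 2.0, 1.0 },
            new double[] { 3.0, 2.0 },
            new double[] { 4.0, 3.0 }
        };
        double[] outputs = { 3.1, 4.0, 6.2, 8.1 };

        var ols = new OrdinaryLeastSquares() { UseIntercept = true };
        MultipleLinearRegression regression = ols.Learn(inputs, outputs);

        // One coefficient per input column, plus a separate bias term.
        double[] slopes = regression.Weights;
        double bias = regression.Intercept;
        double[] predicted = regression.Transform(inputs);
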
Example #2
        /// <summary>
        /// Learns a model that can map the given inputs to the given outputs.
        /// </summary>
        /// <param name="x">The model inputs.</param>
        /// <param name="y">The desired outputs associated with each <paramref name="x">inputs</paramref>.</param>
        /// <param name="weights">The weight of importance for each input-output pair.</param>
        /// <returns>
        /// A model that has learned how to produce <paramref name="y" /> given <paramref name="x" />.
        /// </returns>
        public SimpleLinearRegression Learn(double[] x, double[] y, double[] weights = null)
        {
            double[][] X = Jagged.ColumnVector(x);

            if (UseIntercept)
            {
                X = X.InsertColumn(value: 1);
            }

            decomposition = X.Decompose(leastSquares: IsRobust);
            double[] coefficients = decomposition.Solve(y);

            if (UseIntercept)
            {
                return new SimpleLinearRegression()
                {
                    Slope = coefficients[0],
                    Intercept = coefficients[1]
                };
            }
            else
            {
                return new SimpleLinearRegression()
                {
                    Slope = coefficients[0]
                };
            }
        }
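
The same kind of hedged sketch for the one-dimensional overload, again assuming an OrdinaryLeastSquares-style host class and made-up data:

        // Hedged sketch: fit y = Slope * x + Intercept from scalar inputs.
        double[] inputs  = { 1, 2, 3, 4, 5 };
        double[] outputs = { 2.1, 3.9, 6.2, 8.1, 9.8 };

        var ols = new OrdinaryLeastSquares() { UseIntercept = true };
        SimpleLinearRegression line = ols.Learn(inputs, outputs);

        double slope = line.Slope;           // close to 2 for this data
        double intercept = line.Intercept;   // close to 0 for this data
        double prediction = line.Transform(6.0);
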
Example #3
        /// <summary>
        /// Learns a model that can map the given inputs to the given outputs.
        /// </summary>
        /// <param name="x">The model inputs.</param>
        /// <param name="y">The desired outputs associated with each <paramref name="x">inputs</paramref>.</param>
        /// <param name="weights">The weight of importance for each input-output pair (if supported by the learning algorithm).</param>
        /// <returns>
        /// A model that has learned how to produce <paramref name="y" /> given <paramref name="x" />.
        /// </returns>
        public MultivariateLinearRegression Learn(double[][] x, double[][] y, double[] weights = null)
        {
            if (UseIntercept)
            {
                x = x.InsertColumn(value: 1.0);
            }

            if (weights != null)
            {
                double[] sqrtW = weights.Sqrt();
                x = Elementwise.Multiply(x, sqrtW, dimension: 1);
                y = Elementwise.Multiply(y, sqrtW, dimension: 1);
            }

            decomposition = x.Decompose(leastSquares: IsRobust);
            double[][] coefficients = decomposition.Solve(y);

            if (UseIntercept)
            {
                return new MultivariateLinearRegression()
                {
                    Weights = coefficients.Get(0, -1, null),
                    Intercepts = coefficients.GetRow(-1)
                };
            }

            return new MultivariateLinearRegression()
            {
                Weights = coefficients
            };
        }
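
This overload folds the sample weights into the problem by scaling each row of x and y by the square root of its weight before decomposing: minimizing the weighted residual sum of squares, sum_i w_i * ||y_i - x_i * B||^2, is equivalent to running ordinary least squares on the scaled rows, because (sqrt(w_i) * r_i)^2 = w_i * r_i^2. A hedged usage sketch, assuming the same OrdinaryLeastSquares-style host and a build in which this weighted overload exists (example #5 below shows a variant that rejects weights instead):

        // Hedged sketch: weighted multivariate fit; weights are per-sample importances.
        double[][] inputs =
        {
            new double[] { 1.0, 0.0 },
            new double[] { 0.0, 1.0 },
            new double[] { 1.0, 1.0 },
            new double[] { 2.0, 1.0 }
        };
        double[][] outputs =
        {
            new double[] { 1.0, 0.5 },
            new double[] { 2.0, 1.0 },
            new double[] { 3.0, 1.4 },
            new double[] { 4.1, 2.1 }
        };
        double[] weights = { 1.0, 0.5, 2.0, 1.0 };   // down-weight the second sample

        var ols = new OrdinaryLeastSquares() { UseIntercept = true };
        MultivariateLinearRegression mlr = ols.Learn(inputs, outputs, weights);

        double[][] predictions = mlr.Transform(inputs);   // one output vector per sample
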
Example #4
        /// <summary>
        /// Learns a model that can map the given inputs to the given outputs.
        /// </summary>
        /// <param name="x">The model inputs.</param>
        /// <param name="y">The desired outputs associated with each <paramref name="x">inputs</paramref>.</param>
        /// <param name="weights">The weight of importance for each input-output pair (if supported by the learning algorithm).</param>
        /// <returns>
        /// A model that has learned how to produce <paramref name="y" /> given <paramref name="x" />.
        /// </returns>
        public SimpleLinearRegression Learn(double[] x, double[] y, double[] weights = null)
        {
            double[][] X = Jagged.ColumnVector(x);

            if (UseIntercept)
            {
                X = X.InsertColumn(value: 1);
            }

            if (weights != null)
            {
                double[] sqrtW = weights.Sqrt();
                X = Elementwise.Multiply(X, sqrtW, dimension: 1, result: X);
                y = Elementwise.Multiply(y, sqrtW);
            }

            decomposition = X.Decompose(leastSquares: IsRobust);
            double[] coefficients = decomposition.Solve(y);

            if (UseIntercept)
            {
                return new SimpleLinearRegression()
                {
                    Slope = coefficients[0],
                    Intercept = coefficients[1]
                };
            }
            else
            {
                return new SimpleLinearRegression()
                {
                    Slope = coefficients[0]
                };
            }
        }
Example #5
        /// <summary>
        /// Learns a model that can map the given inputs to the given outputs.
        /// </summary>
        /// <param name="x">The model inputs.</param>
        /// <param name="y">The desired outputs associated with each <paramref name="x">inputs</paramref>.</param>
        /// <param name="weights">The weight of importance for each input-output pair (if supported by the learning algorithm).</param>
        /// <returns>
        /// A model that has learned how to produce <paramref name="y" /> given <paramref name="x" />.
        /// </returns>
        public MultivariateLinearRegression Learn(double[][] x, double[][] y, double[] weights = null)
        {
            if (weights != null)
            {
                throw new ArgumentException(Accord.Properties.Resources.NotSupportedWeights, "weights");
            }

            if (UseIntercept)
            {
                x = x.InsertColumn(value: 1.0);
            }

            decomposition = x.Decompose(leastSquares: IsRobust);
            double[][] coefficients = decomposition.Solve(y);

            if (UseIntercept)
            {
                return new MultivariateLinearRegression()
                {
                    Weights = coefficients.Get(0, -1, null),
                    Intercepts = coefficients.GetRow(-1)
                };
            }

            return new MultivariateLinearRegression()
            {
                Weights = coefficients
            };
        }
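
Unlike example #3, this variant rejects per-sample weights outright. A hedged sketch of the resulting behavior, with illustrative data and the same assumed host class:

        // Hedged sketch: passing weights to this overload throws ArgumentException.
        double[][] inputs  = { new double[] { 1.0, 2.0 }, new double[] { 2.0, 3.0 }, new double[] { 3.0, 5.0 } };
        double[][] outputs = { new double[] { 1.0 }, new double[] { 2.0 }, new double[] { 3.0 } };

        var ols = new OrdinaryLeastSquares();
        try
        {
            ols.Learn(inputs, outputs, weights: new[] { 1.0, 1.0, 1.0 });
        }
        catch (ArgumentException)
        {
            // Weighted samples are not supported here; call Learn(inputs, outputs) instead.
        }
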
Example #6
        /// <summary>
        /// Learns a model that can map the given inputs to the given outputs.
        /// </summary>
        /// <param name="x">The model inputs.</param>
        /// <param name="y">The desired outputs associated with each <paramref name="x">inputs</paramref>.</param>
        /// <param name="weights">The weight of importance for each input-output pair (if supported by the learning algorithm).</param>
        /// <returns>
        /// A model that has learned how to produce <paramref name="y" /> given <paramref name="x" />.
        /// </returns>
        public SimpleLinearRegression Learn(double[] x, double[] y, double[] weights = null)
        {
            if (weights != null)
            {
                throw new ArgumentException(Accord.Properties.Resources.NotSupportedWeights, "weights");
            }

            double[][] X = Jagged.ColumnVector(x);

            if (UseIntercept)
            {
                X = X.InsertColumn(value: 1);
            }

            decomposition = X.Decompose(leastSquares: IsRobust);
            double[] coefficients = decomposition.Solve(y);

            if (UseIntercept)
            {
                return new SimpleLinearRegression()
                {
                    Slope = coefficients[0],
                    Intercept = coefficients[1]
                };
            }
            else
            {
                return new SimpleLinearRegression()
                {
                    Slope = coefficients[0]
                };
            }
        }
Example #7
        /// <summary>
        /// Learns a model that can map the given inputs to the given outputs.
        /// </summary>
        /// <param name="x">The model inputs.</param>
        /// <param name="y">The desired outputs associated with each <paramref name="x">inputs</paramref>.</param>
        /// <param name="weights">The weight of importance for each input-output pair.</param>
        /// <returns>
        /// A model that has learned how to produce <paramref name="y" /> given <paramref name="x" />.
        /// </returns>
        public MultivariateLinearRegression Learn(double[][] x, double[][] y, double[] weights = null)
        {
            if (UseIntercept)
            {
                x = x.InsertColumn(value: 1.0);
            }

            decomposition = x.Decompose(leastSquares: IsRobust);
            double[][] coefficients = decomposition.Solve(y);

            if (UseIntercept)
            {
                return new MultivariateLinearRegression()
                {
                    Weights = coefficients.Get(0, -1, null),
                    Intercepts = coefficients.GetRow(-1)
                };
            }

            return new MultivariateLinearRegression()
            {
                Weights = coefficients
            };
        }