        /// <summary>Generates a linear regression model from a set of training examples.</summary>
        /// <param name="x">The matrix of training examples (one example per row).</param>
        /// <param name="y">The vector of target values.</param>
        /// <returns>The trained model.</returns>
        public override IModel Generate(Matrix x, Vector y)
        {
            // create initial theta
            Vector theta = Vector.Ones(x.Cols + 1);
            Matrix copy  = x.Copy();

            // normalise features
            for (int i = 0; i < copy.Cols; i++)
            {
                var scaled = FeatureNormalizer.FeatureScale(copy[i, VectorType.Col]);
                for (int k = 0; k < copy.Rows; k++)
                {
                    copy[k, i] = scaled[k];
                }
            }

            // add intercept term
            copy = copy.Insert(Vector.Ones(copy.Rows), 0, VectorType.Col);

            // run gradient descent
            var run = GradientDescent.Run(
                theta,
                copy,
                y,
                this.MaxIterations,
                this.LearningRate,
                new LinearCostFunction(),
                this.Lambda,
                new Regularization());

            // create the model, capturing the training-set feature statistics and the learned theta
            LinearRegressionModel model = new LinearRegressionModel(x.Mean(VectorType.Row), x.StdDev(VectorType.Row))
            {
                Descriptor = this.Descriptor,
                Theta      = run.Item2
            };

            return model;
        }
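
For context, a hedged usage sketch is shown below. The enclosing generator class name (LinearRegressionGenerator) and the Matrix/Vector array constructors are assumptions based on numl conventions, not confirmed by this snippet:

// Hedged usage sketch: the generator class name and the Matrix/Vector
// constructors below are assumptions, not taken from the snippet above.
var x = new Matrix(new[]
{
    new[] { 1.0, 1.0 },
    new[] { 2.0, 3.0 },
    new[] { 3.0, 5.0 }
});
var y = new Vector(new[] { 2.0, 4.0, 6.0 });

var generator = new LinearRegressionGenerator
{
    LearningRate  = 0.01,
    MaxIterations = 400,
    Lambda        = 0
};

var model = (LinearRegressionModel)generator.Generate(x, y);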
Example #2
        /// <summary>
        ///     Scales the input vector using the feature means and standard deviations captured
        ///     during training, then prepends the intercept term.
        /// </summary>
        /// <param name="y">The feature vector to normalise.</param>
        /// <returns>The normalised vector with a leading intercept term of 1.0.</returns>
        private Vector Normalise(Vector y)
        {
            for (var i = 0; i < y.Length; i++)
            {
                y[i] = FeatureNormalizer.FeatureScale(y[i], this.FeatureAverages[i], this.FeatureStandardDeviations[i]);
            }

            return y.Insert(0, 1.0d);
        }
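
The per-feature scaling above uses the training statistics stored in FeatureAverages and FeatureStandardDeviations, and the prepended 1.0 mirrors the column of ones inserted in Generate. Below is a self-contained sketch of the same transformation on plain arrays, assuming FeatureNormalizer.FeatureScale computes a z-score, (value - mean) / stddev; verify against numl's source:

        // Standalone illustration of what Normalise does per element
        // (assumes FeatureNormalizer.FeatureScale is a z-score; this helper is hypothetical).
        private static double[] ScaleAndAddIntercept(double[] features, double[] means, double[] stdDevs)
        {
            var result = new double[features.Length + 1];
            result[0] = 1.0; // intercept term, mirroring y.Insert(0, 1.0d)
            for (var i = 0; i < features.Length; i++)
            {
                result[i + 1] = (features[i] - means[i]) / stdDevs[i];
            }

            return result;
        }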