        /// <summary>Generates a Linear Regression model from a set of training examples.</summary>
        /// <param name="x">The Matrix of training examples (one example per row).</param>
        /// <param name="y">The Vector of target values, one per example.</param>
        /// <returns>The trained LinearRegressionModel.</returns>
        public override IModel Generate(Matrix x, Vector y)
        {
            // create initial theta (one weight per feature plus the intercept term)
            Vector theta = Vector.Ones(x.Cols + 1);
            Matrix copy  = x.Copy();

            // normalise features
            for (int i = 0; i < copy.Cols; i++)
            {
                var scaled = FeatureNormalizer.FeatureScale(copy[i, VectorType.Col]);
                for (int k = 0; k < copy.Rows; k++)
                {
                    copy[k, i] = scaled[k];
                }
            }

            // add intercept term
            copy = copy.Insert(Vector.Ones(copy.Rows), 0, VectorType.Col);

            // run gradient descent
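            // (Run returns a tuple; Item2 is the optimised theta applied to the model below)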
            var run = GradientDescent.Run(theta, copy, y, this.MaxIterations, this.LearningRate, new LinearCostFunction(), this.Lambda, new Regularization());

            // once gradient descent has converged, create the model and apply the learned theta
            LinearRegressionModel model = new LinearRegressionModel(x.Mean(VectorType.Row), x.StdDev(VectorType.Row))
            {
                Descriptor = this.Descriptor,
                Theta      = run.Item2
            };

            return model;
        }
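
        // Example usage (a minimal sketch, assuming this Generate method lives on a
        // generator class such as numl's LinearRegressionGenerator; class and member
        // names not shown in this snippet are assumptions):
        //
        //   var generator = new LinearRegressionGenerator
        //   {
        //       Descriptor    = descriptor,   // hypothetical Descriptor built for the data set
        //       LearningRate  = 0.01,
        //       MaxIterations = 400,
        //       Lambda        = 0
        //   };
        //   IModel model = generator.Generate(x, y);             // x: examples, y: targets
        //   double yHat  = model.Predict(x[0, VectorType.Row]);  // prediction for one example
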
        /// <summary>Generates a Logistic Regression model from a set of training examples.</summary>
        /// <param name="x">The Matrix of training examples (one example per row).</param>
        /// <param name="y">The Vector of binary class labels, one per example.</param>
        /// <returns>The trained LogisticRegressionModel.</returns>
        public override IModel Generate(Matrix x, Vector y)
        {
            // copy the examples and expand them with higher-order (polynomial) feature terms
            Matrix copy = x.Copy();

            copy = PreProcessing.FeatureDimensions.IncreaseDimensions(copy, PolynomialFeatures);

            // add intercept term
            copy = copy.Insert(Vector.Ones(copy.Rows), 0, VectorType.Col);

            // create initial theta (one weight per column, including the intercept)
            Vector theta = Vector.Ones(copy.Cols);

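            // run regularised gradient descent with the logistic cost function;
            // Item2 of the returned tuple is the optimised theta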
            var run = GradientDescent.Run(theta, copy, y, this.MaxIterations, this.LearningRate, new Functions.CostFunctions.LogisticCostFunction(),
                                          this.Lambda, new Regularization());

            LogisticRegressionModel model = new LogisticRegressionModel()
            {
                Descriptor         = this.Descriptor,
                Theta              = run.Item2,
                LogisticFunction   = new Math.Functions.Logistic(),
                PolynomialFeatures = this.PolynomialFeatures
            };

            return model;
        }
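
        // Example usage (a minimal sketch, assuming this Generate method lives on a
        // generator class such as numl's LogisticRegressionGenerator; class and member
        // names not shown in this snippet are assumptions):
        //
        //   var generator = new LogisticRegressionGenerator
        //   {
        //       Descriptor         = descriptor,   // hypothetical Descriptor for the data set
        //       LearningRate       = 0.1,
        //       MaxIterations      = 500,
        //       Lambda             = 1,
        //       PolynomialFeatures = 2              // expand inputs with higher-order terms
        //   };
        //   IModel model  = generator.Generate(x, y);              // y holds binary class labels
        //   double output = model.Predict(x[0, VectorType.Row]);   // prediction for one example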