Linear Regression model generator examples (class inherits from Model)
        /// <summary>Generate Linear Regression model based on a set of examples.</summary>
        /// <param name="x">The Matrix to process.</param>
        /// <param name="y">The Vector to process.</param>
        /// <returns>Model.</returns>
        public override IModel Generate(Matrix x, Vector y)
        {
            // starting parameter vector: one weight per feature plus the intercept
            var theta = Vector.Ones(x.Cols + 1);

            // work on a copy so the caller's matrix is left untouched
            var copy = x.Copy();

            // rescale each feature column into a comparable range
            for (int col = 0; col < copy.Cols; col++)
            {
                var scaled = FeatureNormalizer.FeatureScale(copy[col, VectorType.Col]);
                for (int row = 0; row < copy.Rows; row++)
                    copy[row, col] = scaled[row];
            }

            // prepend the intercept (bias) column of ones
            copy = copy.Insert(Vector.Ones(copy.Rows), 0, VectorType.Col);

            // minimize the regularized linear cost via gradient descent
            var result = GradientDescent.Run(theta, copy, y, this.MaxIterations, this.LearningRate, new LinearCostFunction(), this.Lambda, new Regularization());

            // wrap the learned parameters (Item2) in a model that also keeps the
            // per-column mean and standard deviation for prediction-time scaling
            var model = new LinearRegressionModel(x.Mean(VectorType.Row), x.StdDev(VectorType.Row))
            {
                Descriptor = this.Descriptor,
                Theta      = result.Item2
            };

            return model;
        }
        /// <summary>Generate Linear Regression model based on a set of examples.</summary>
        /// <param name="x">The Matrix to process.</param>
        /// <param name="y">The Vector to process.</param>
        /// <returns>Model.</returns>
        public override IModel Generate(Matrix x, Vector y)
        {
            // one parameter per input column, plus one for the intercept term
            Vector initialTheta = Vector.Ones(x.Cols + 1);

            // never mutate the caller's matrix
            Matrix features = x.Copy();

            // normalize every column so features share a comparable scale
            for (int c = 0; c < features.Cols; c++)
            {
                var normalized = FeatureNormalizer.FeatureScale(features[c, VectorType.Col]);
                for (int r = 0; r < features.Rows; r++)
                {
                    features[r, c] = normalized[r];
                }
            }

            // bias column of ones in front of the feature columns
            features = features.Insert(Vector.Ones(features.Rows), 0, VectorType.Col);

            // fit theta by gradient descent over the linear cost function
            var descent = GradientDescent.Run(initialTheta, features, y, MaxIterations, LearningRate,
                new LinearCostFunction(), Lambda, new Regularization());

            // package the fitted parameters (Item2) together with the column
            // statistics the model needs to normalize inputs at prediction time
            return new LinearRegressionModel(x.Mean(VectorType.Row), x.StdDev(VectorType.Row))
            {
                Descriptor = Descriptor,
                Theta = descent.Item2
            };
        }
Example #3
0
        /// <summary>Generate Linear Regression model based on a set of examples.</summary>
        /// <param name="X">The Matrix to process.</param>
        /// <param name="y">The Vector to process.</param>
        /// <returns>Model.</returns>
        public override IModel Generate(Matrix X, Vector y)
        {
            // run the inherited preprocessing step over the inputs
            Preprocess(X);

            // work on a copy so the caller's matrix is untouched
            Matrix design = X.Copy();

            // intercept (bias) column of ones in front of the features
            design = design.Insert(Vector.Ones(design.Rows), 0, VectorType.Col);

            // random starting point for the parameter vector
            Vector initial = Vector.Rand(design.Cols);

            // gradient-descent optimizer over the L2-regularized linear cost
            var optimizer = new Optimizer(initial, MaxIterations, LearningRate)
            {
                CostFunction = new LinearCostFunction
                {
                    X = design,
                    Y = y,
                    Lambda = Lambda,
                    Regularizer = new L2Regularizer()
                }
            };

            optimizer.Run();

            // carry the descriptor and normalization settings into the model so
            // it can reproduce the same preprocessing at prediction time
            return new LinearRegressionModel
            {
                Descriptor = Descriptor,
                NormalizeFeatures = NormalizeFeatures,
                FeatureNormalizer = FeatureNormalizer,
                FeatureProperties = FeatureProperties,
                Theta = optimizer.Properties.Theta
            };
        }
        /// <summary>Generate Linear Regression model based on a set of examples.</summary>
        /// <param name="X">The Matrix to process.</param>
        /// <param name="y">The Vector to process.</param>
        /// <returns>Model.</returns>
        public override IModel Generate(Matrix X, Vector y)
        {
            // apply the inherited preprocessing step to the inputs
            this.Preprocess(X);

            // copy so the caller's matrix stays unmodified
            var working = X.Copy();

            // prepend the intercept (bias) column of ones
            working = working.Insert(Vector.Ones(working.Rows), 0, VectorType.Col);

            // random initial guess for the parameter vector
            var startTheta = Vector.Rand(working.Cols);

            // configure a gradient-descent optimizer over the
            // L2-regularized linear cost function
            var solver = new numl.Math.Optimization.Optimizer(startTheta, this.MaxIterations, this.LearningRate)
            {
                CostFunction = new numl.Math.Functions.Cost.LinearCostFunction()
                {
                    X           = working,
                    Y           = y,
                    Lambda      = this.Lambda,
                    Regularizer = new numl.Math.Functions.Regularization.L2Regularizer()
                }
            };

            solver.Run();

            // transfer the descriptor, normalization settings and the fitted
            // parameters into the model used for prediction
            var model = new LinearRegressionModel()
            {
                Descriptor        = this.Descriptor,
                NormalizeFeatures = base.NormalizeFeatures,
                FeatureNormalizer = base.FeatureNormalizer,
                FeatureProperties = base.FeatureProperties,
                Theta             = solver.Properties.Theta
            };

            return model;
        }
        /// <summary>Loads a serialized LinearRegressionModel from XML via LoadXml.</summary>
        public void Load_LinearRegression()
        {
            // serialized model: a descriptor with two double features and one
            // label, followed by three vectors — presumably theta (size 3, with
            // intercept) and the feature mean/stddev pairs — TODO confirm layout
            string serialized = @"<?xml version=""1.0""?>
<LinearRegressionModel>
  <Descriptor Type=""None"" Name="""">
    <Features Length=""2"">
      <Property Name=""LeftOperand"" Type=""Double"" Discrete=""False"" Start=""0"" />
      <Property Name=""RightOperand"" Type=""Double"" Discrete=""False"" Start=""1"" />
    </Features>
    <Label>
      <Property Name=""Result"" Type=""Double"" Discrete=""False"" Start=""-1"" />
    </Label>
  </Descriptor>
  <v size=""3"">
    <e>73299.802339155649</e>
    <e>13929.858323609986</e>
    <e>28235.048808708329</e>
  </v>
  <v size=""2"">
    <e>22155.108339050836</e>
    <e>25812.304093938921</e>
  </v>
  <v size=""2"">
    <e>14120.242563388447</e>
    <e>14302.3670376599</e>
  </v>
</LinearRegressionModel>";

            // deserialization should populate the model without throwing
            var model = new LinearRegressionModel();
            model.LoadXml(serialized);
        }
        /// <summary>Generate Linear Regression model based on a set of examples.</summary>
        /// <param name="X">The Matrix to process.</param>
        /// <param name="y">The Vector to process.</param>
        /// <returns>Model.</returns>
        public override IModel Generate(Matrix X, Vector y)
        {
            // base-class preprocessing of the inputs
            this.Preprocess(X);

            // never mutate the caller's matrix
            Matrix augmented = X.Copy();

            // bias column of ones ahead of the feature columns
            augmented = augmented.Insert(Vector.Ones(augmented.Rows), 0, VectorType.Col);

            // randomly initialized parameter vector, one entry per column
            Vector weights = Vector.Rand(augmented.Cols);

            // gradient descent over the linear cost with L2 regularization
            var gd = new numl.Math.Optimization.Optimizer(weights, this.MaxIterations, this.LearningRate)
            {
                CostFunction = new numl.Math.Functions.Cost.LinearCostFunction()
                {
                    X = augmented,
                    Y = y,
                    Lambda = this.Lambda,
                    Regularizer = new numl.Math.Functions.Regularization.L2Regularizer()
                }
            };

            gd.Run();

            // hand the converged parameters plus the preprocessing settings to
            // the model so predictions can replicate the same pipeline
            return new LinearRegressionModel()
            {
                Descriptor = this.Descriptor,
                NormalizeFeatures = base.NormalizeFeatures,
                FeatureNormalizer = base.FeatureNormalizer,
                FeatureProperties = base.FeatureProperties,
                Theta = gd.Properties.Theta
            };
        }