Example #1
        /// <summary>Generate model based on a set of examples.</summary>
        /// <param name="x">The Matrix to process.</param>
        /// <param name="y">The Vector to process.</param>
        /// <returns>Model.</returns>
        public override IModel Generate(Matrix x, Vector y)
        {
            // default: no explicit limit means roughly 1000 passes over the training data
            if (this.MaxIterations == -1)
            {
                this.MaxIterations = x.Rows * 1000;
            }

            var network = Network.Default(this.Descriptor, x, y, this.Activation);
            var model = new NeuralNetworkModel { Descriptor = this.Descriptor, Network = network };
            this.OnModelChanged(this, ModelEventArgs.Make(model, "Initialized"));

            for (var i = 0; i < this.MaxIterations; i++)
            {
                var idx = i % x.Rows;
                network.Forward(x[idx, VectorType.Row]);

                // OnModelChanged(this, ModelEventArgs.Make(model, "Forward"));
                network.Back(y[idx], this.LearningRate);
                var output = string.Format("Run ({0}/{1})", i, this.MaxIterations);
                this.OnModelChanged(this, ModelEventArgs.Make(model, output));
            }

            return model;
        }
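
The loop above trains online: each iteration presents a single row (idx = i % x.Rows), so the default of x.Rows * 1000 iterations corresponds to roughly 1000 passes over the data. A minimal standalone sketch of that index cycling (plain C#, no numl types; rows, idx, and epoch are illustrative names):

        // plain-C# sketch: how the iteration counter maps to rows and epochs
        int rows = 4;
        int maxIterations = rows * 1000;   // mirrors the default used when MaxIterations == -1

        for (var i = 0; i < maxIterations; i++)
        {
            var idx = i % rows;    // row presented at this step
            var epoch = i / rows;  // completed passes over the training data
            if (idx == 0 && epoch % 100 == 0)
            {
                System.Console.WriteLine(string.Format("epoch {0} of {1}", epoch, maxIterations / rows));
            }
            // network.Forward(...) and network.Back(...) would run here for row idx
        }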
Example #2
        /// <summary>Estimates the sample mean vector (Mu) and covariance matrix (Sigma) from the given examples.</summary>
        /// <param name="X">The Matrix to process.</param>
        /// <param name="type">(Optional) whether examples are stored as rows or columns.</param>
        public void Estimate(Matrix X, VectorType type = VectorType.Row)
        {
            var n = type == VectorType.Row ? X.Rows : X.Cols;
            var s = type == VectorType.Row ? X.Cols : X.Rows;
            this.Mu = X.Sum(type) / n;
            this.Sigma = Matrix.Zeros(s);

            for (var i = 0; i < n; i++)
            {
                var x = X[i, type] - this.Mu;
                this.Sigma += x.Outer(x);
            }

            this.Sigma *= 1d / (n - 1d);
        }
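
For reference, here is the same estimate with plain arrays instead of numl's Matrix and Vector: Mu is the per-feature mean and Sigma is the unbiased sample covariance, 1/(n - 1) times the summed outer products of the centered rows. The data values are made up for illustration.

        // standalone sketch: sample mean and covariance of n observations with s features
        double[][] X = { new[] { 1.0, 2.0 }, new[] { 2.0, 3.5 }, new[] { 3.0, 6.0 } };
        int n = X.Length, s = X[0].Length;

        var mu = new double[s];
        foreach (var row in X)
            for (var j = 0; j < s; j++)
                mu[j] += row[j] / n;

        var sigma = new double[s, s];
        foreach (var row in X)
            for (var j = 0; j < s; j++)
                for (var k = 0; k < s; k++)
                    sigma[j, k] += (row[j] - mu[j]) * (row[k] - mu[k]) / (n - 1);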
Example #3
        /// <summary>
        ///     Compute the error cost of the given Theta parameter for the training and label sets
        /// </summary>
        /// <param name="theta">Learning Theta parameters</param>
        /// <param name="X">Training set</param>
        /// <param name="y">Training labels</param>
        /// <param name="lambda">Regularization constant</param>
        /// <param name="regularizer">Regularization term function.</param>
        /// <returns>The computed cost.</returns>
        public double ComputeCost(Vector theta, Matrix X, Vector y, double lambda, IRegularizer regularizer)
        {
            var m = X.Rows;

            var j = 0.0;

            var s = (X * theta).ToVector();

            j = 1.0 / (2.0 * m) * ((s - y) ^ 2.0).Sum();

            if (lambda != 0)
            {
                j = regularizer.Regularize(j, theta, m, lambda);
            }

            return j;
        }
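
The unregularized term is the standard least-squares cost J = 1/(2m) * sum((X*theta - y)^2). A standalone check with plain arrays (the data here is illustrative, not from the library):

        // hand-rolled cost: J = 1/(2m) * sum_i (x_i . theta - y_i)^2
        double[][] X = { new[] { 1.0, 1.0 }, new[] { 1.0, 2.0 }, new[] { 1.0, 3.0 } };
        double[] theta = { 0.5, 1.0 };
        double[] y = { 1.0, 2.0, 3.0 };
        var m = X.Length;

        var j = 0.0;
        for (var i = 0; i < m; i++)
        {
            var h = 0.0;   // hypothesis h = x_i . theta
            for (var k = 0; k < theta.Length; k++)
                h += X[i][k] * theta[k];

            j += (h - y[i]) * (h - y[i]);
        }

        j *= 1.0 / (2.0 * m);   // j is the unregularized cost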
Example #4
        /// <summary>
        ///     Compute the gradient of the cost function for the given Theta parameter over the training and label sets
        /// </summary>
        /// <param name="theta">Learning Theta parameters</param>
        /// <param name="X">Training set</param>
        /// <param name="y">Training labels</param>
        /// <param name="lambda">Regularization constant</param>
        /// <param name="regularizer">Regularization term function.</param>
        /// <returns>The gradient vector.</returns>
        public Vector ComputeGradient(Vector theta, Matrix X, Vector y, double lambda, IRegularizer regularizer)
        {
            var m = X.Rows;
            var gradient = Vector.Zeros(theta.Length);

            var s = (X * theta).ToVector();

            for (var i = 0; i < theta.Length; i++)
            {
                gradient[i] = 1.0 / m * ((s - y) * X[i, VectorType.Col]).Sum();
            }

            if (lambda != 0)
            {
                gradient = regularizer.Regularize(theta, gradient, m, lambda);
            }

            return gradient;
        }
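
Component-wise, the unregularized gradient is grad[k] = 1/m * sum_i (x_i . theta - y_i) * x_i[k], which is what the X[i, VectorType.Col] slice computes above. A plain-array sketch with the same kind of illustrative data as the cost example:

        // hand-rolled gradient of the least-squares cost
        double[][] X = { new[] { 1.0, 1.0 }, new[] { 1.0, 2.0 }, new[] { 1.0, 3.0 } };
        double[] theta = { 0.5, 1.0 };
        double[] y = { 1.0, 2.0, 3.0 };
        var m = X.Length;

        var gradient = new double[theta.Length];
        for (var i = 0; i < m; i++)
        {
            var h = 0.0;   // hypothesis for row i
            for (var k = 0; k < theta.Length; k++)
                h += X[i][k] * theta[k];

            for (var k = 0; k < theta.Length; k++)
                gradient[k] += (h - y[i]) * X[i][k] / m;
        }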
Example #5
        /// <summary>Generate model based on a set of examples.</summary>
        /// <param name="x">The Matrix to process.</param>
        /// <param name="y">The Vector to process.</param>
        /// <returns>Model.</returns>
        public override IModel Generate(Matrix x, Vector y)
        {
            var N = y.Length;
            var a = Vector.Zeros(N);

            // compute kernel
            var K = this.Kernel.Compute(x);

            var n = 1;

            // cap training at 500 passes; storing the full SPD kernel matrix like this
            // is simple but memory-hungry for large datasets
            var found_error = true;
            while (n < 500 && found_error)
            {
                found_error = false;
                for (var i = 0; i < N; i++)
                {
                    // misclassified (or boundary) example: add its label to its dual weight
                    if (y[i] * a.Dot(K[i]) <= 0)
                    {
                        a[i] += y[i];
                        found_error = true;
                    }
                }

                n++;
            }

            // anything that *matters*
            // i.e. support vectors
            var indices = a.Indices(d => d != 0);

            // slice up examples to contain
            // only support vectors
            return new KernelPerceptronModel
            {
                Kernel = this.Kernel,
                A = a.Slice(indices),
                Y = y.Slice(indices),
                X = x.Slice(indices)
            };
        }
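
The update is the standard dual (kernel) perceptron: the decision value for example i is sum_j a[j] * K(x_j, x_i), and a misclassified example has its label added to a[i]. A standalone sketch with a plain linear kernel (illustrative data; no numl Kernel or Matrix types):

        // dual perceptron with a linear kernel k(u, v) = u . v
        double[][] X = { new[] { 1.0, 1.0 }, new[] { 2.0, 1.0 }, new[] { -1.0, -1.0 } };
        double[] y = { 1.0, 1.0, -1.0 };
        var N = y.Length;
        var a = new double[N];

        var foundError = true;
        for (var pass = 0; pass < 500 && foundError; pass++)
        {
            foundError = false;
            for (var i = 0; i < N; i++)
            {
                var f = 0.0;   // f(x_i) = sum_j a[j] * k(x_j, x_i)
                for (var jj = 0; jj < N; jj++)
                {
                    var k = 0.0;
                    for (var d = 0; d < X[i].Length; d++)
                        k += X[jj][d] * X[i][d];

                    f += a[jj] * k;
                }

                if (y[i] * f <= 0)   // misclassified (or on the boundary)
                {
                    a[i] += y[i];
                    foundError = true;
                }
            }
        }

        // the non-zero entries of a mark the support vectors, as in the slice above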
Example #6
        /// <summary>Generate Linear Regression model based on a set of examples.</summary>
        /// <param name="x">The Matrix to process.</param>
        /// <param name="y">The Vector to process.</param>
        /// <returns>Model.</returns>
        public override IModel Generate(Matrix x, Vector y)
        {
            // create initial theta
            var theta = Vector.Ones(x.Cols + 1);
            var copy = x.Copy();

            // normalize each feature column
            for (var i = 0; i < copy.Cols; i++)
            {
                var j = FeatureNormalizer.FeatureScale(copy[i, VectorType.Col]);
                for (var k = 0; k < copy.Rows; k++)
                {
                    copy[k, i] = j[k];
                }
            }

            // add intercept term
            copy = copy.Insert(Vector.Ones(copy.Rows), 0, VectorType.Col);

            // run gradient descent
            var run = GradientDescent.Run(
                theta,
                copy,
                y,
                this.MaxIterations,
                this.LearningRate,
                new LinearCostFunction(),
                this.Lambda,
                new Regularization());

            // once converged create model and apply theta
            var model = new LinearRegressionModel(x.Mean(VectorType.Row), x.StdDev(VectorType.Row))
            {
                Descriptor = this.Descriptor,
                Theta = run.Item2
            };

            return model;
        }
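
The GradientDescent.Run call above drives the optimization; conceptually each iteration applies theta <- theta - alpha * gradient using the cost and gradient functions shown earlier. A standalone batch sketch with plain arrays (the learning rate, iteration count, and data are illustrative, not the library's defaults):

        // batch gradient descent on the least-squares cost (intercept column already prepended)
        double[][] X = { new[] { 1.0, 0.0 }, new[] { 1.0, 0.5 }, new[] { 1.0, 1.0 } };
        double[] y = { 1.0, 2.0, 3.0 };
        double[] theta = { 1.0, 1.0 };
        var alpha = 0.1;   // learning rate (illustrative)
        var m = X.Length;

        for (var iter = 0; iter < 1000; iter++)
        {
            var grad = new double[theta.Length];
            for (var i = 0; i < m; i++)
            {
                var h = 0.0;
                for (var k = 0; k < theta.Length; k++)
                    h += X[i][k] * theta[k];

                for (var k = 0; k < theta.Length; k++)
                    grad[k] += (h - y[i]) * X[i][k] / m;
            }

            for (var k = 0; k < theta.Length; k++)
                theta[k] -= alpha * grad[k];   // theta <- theta - alpha * gradient
        }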
Example #7
        /// <summary>
        /// Initialization method for performing custom actions prior to being optimized.
        /// </summary>
        public override void Initialize()
        {
            base.Initialize();

            YReformed = Y.Reshape(X.Rows, VectorType.Row);
        }
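
For intuition, a plain-array illustration of turning a flat label vector into a matrix with one row per example; whether numl's Reshape(X.Rows, VectorType.Row) fills row-major exactly like this is an assumption here, not something the snippet above confirms.

        // illustrative row-major reshape of a flat vector into rows x cols
        double[] Y = { 1, 0, 0, 1, 1, 0 };
        var rows = 3;
        var cols = Y.Length / rows;

        var reshaped = new double[rows, cols];
        for (var i = 0; i < Y.Length; i++)
            reshaped[i / cols, i % cols] = Y[i];   // assumption: row-major fill order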