Example #1
        //protected virtual TModel Create(int inputs, TKernel kernel)
        //{
        //    return SupportVectorLearningHelper.Create<TModel, TInput, TKernel>(inputs, kernel);
        //}


        /// <summary>
        /// Learns a model that can map the given inputs to the given outputs.
        /// </summary>
        /// <param name="x">The model inputs.</param>
        /// <param name="y">The desired outputs associated with each <paramref name="x">inputs</paramref>.</param>
        /// <param name="weights">The weight of importance for each input-output pair.</param>
        /// <returns>
        /// A model that has learned how to produce <paramref name="y" /> given <paramref name="x" />.
        /// </returns>
        public override TModel Learn(TInput[] x, bool[] y, double[] weights = null)
        {
            bool initialized = false;

            SupportVectorLearningHelper.CheckArgs(x, y);

            if (kernel == null)
            {
                kernel      = SupportVectorLearningHelper.CreateKernel<TKernel, TInput>(x);
                initialized = true;
            }

            if (!initialized && useKernelEstimation)
            {
                kernel = SupportVectorLearningHelper.EstimateKernel(kernel, x);
            }

            if (Model == null)
            {
                Model = Create(SupportVectorLearningHelper.GetNumberOfInputs(kernel, x), kernel);
            }

            Model.Kernel = kernel;

            // Initial argument checking
            SupportVectorLearningHelper.CheckArgs(Model, x, y);


            // Count class prevalence
            int positives, negatives;

            Classes.GetRatio(y, out positives, out negatives);

            // If all examples belong to a single class, terminate
            //   learning early by directly setting the threshold.

            try
            {
                if (positives == 0 || negatives == 0)
                {
                    Model.SupportVectors = new TInput[0];
                    Model.Weights        = new double[0];
                    Model.Threshold      = (positives == 0) ? -1 : +1;
                    return Model;
                }

                // Initialization heuristics
                if (useComplexityHeuristic)
                {
                    complexity = kernel.EstimateComplexity(x);
                }

                if (useClassLabelProportion)
                {
                    WeightRatio = positives / (double)negatives;
                }

                // Create per sample complexity
                Cpositive = complexity * positiveWeight;
                Cnegative = complexity * negativeWeight;

                Inputs = x;

                C = new double[y.Length];
                for (int i = 0; i < y.Length; i++)
                {
                    C[i] = y[i] ? Cpositive : Cnegative;
                }

                Outputs = new int[y.Length];
                for (int i = 0; i < y.Length; i++)
                {
                    Outputs[i] = y[i] ? 1 : -1;
                }

                if (weights != null)
                {
                    for (int i = 0; i < C.Length; i++)
                    {
                        C[i] *= weights[i];
                    }
                }


                InnerRun();

                SupportVectorLearningHelper.CheckOutput(Model);

                return Model;
            }
            finally
            {
                if (machine != null)
                {
                    // TODO: This block is only necessary to offer compatibility
                    // to code written using previous versions of the framework,
                    // and should be removed after a few releases.
                    machine.SupportVectors  = Model.SupportVectors;
                    machine.Weights         = Model.Weights;
                    machine.Threshold       = Model.Threshold;
                    machine.Kernel          = Model.Kernel;
                    machine.IsProbabilistic = Model.IsProbabilistic;
                }
            }
        }
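
A minimal usage sketch for the binary overload above, assuming the public Accord.NET API (SequentialMinimalOptimization<Gaussian>, SupportVectorMachine<Gaussian>, Decide); the toy data is illustrative only.

        using Accord.MachineLearning.VectorMachines;
        using Accord.MachineLearning.VectorMachines.Learning;
        using Accord.Statistics.Kernels;

        // Illustrative, linearly separable toy data.
        double[][] inputs =
        {
            new double[] { 0, 0 },
            new double[] { 0, 1 },
            new double[] { 1, 0 },
            new double[] { 1, 1 },
        };
        bool[] outputs = { false, false, false, true };

        // Calling Learn(x, y) on the teacher runs the overload shown above:
        // the kernel is created or estimated, the Model is created if needed,
        // per-sample C values are filled in, and InnerRun() does the optimization.
        var teacher = new SequentialMinimalOptimization<Gaussian>()
        {
            UseKernelEstimation    = true,  // estimate kernel parameters from x
            UseComplexityHeuristic = true   // estimate the complexity C from x
        };

        SupportVectorMachine<Gaussian> svm = teacher.Learn(inputs, outputs);
        bool[] predicted = svm.Decide(inputs);
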
        /// <summary>
        /// Learns a model that can map the given inputs to suitable outputs.
        /// </summary>
        /// <param name="x">The model inputs.</param>
        /// <param name="weights">The weight of importance for each input sample.</param>
        /// <returns>
        /// A model that has learned how to produce suitable outputs
        /// given the input data <paramref name="x" />.
        /// </returns>
        public TModel Learn(TInput[] x, double[] weights = null)
        {
            bool initialized = false;

            SupportVectorLearningHelper.CheckArgs(x);

            if (kernel == null)
            {
                kernel      = SupportVectorLearningHelper.CreateKernel<TKernel, TInput>(x);
                initialized = true;
            }

            if (!initialized && useKernelEstimation)
            {
                kernel = SupportVectorLearningHelper.EstimateKernel(kernel, x);
            }

            if (Model == null)
            {
                Model = Create(SupportVectorLearningHelper.GetNumberOfInputs(kernel, x), kernel);
            }

            Model.Kernel = kernel;

            try
            {
                this.inputs = x;
                double[] zeros = new double[inputs.Length];
                int[]    ones  = Vector.Ones<int>(inputs.Length);
                this.alpha = Vector.Ones<double>(inputs.Length);

                int l = inputs.Length;
                int n = (int)(nu * l);  // number of alphas at the upper bound

                for (int i = 0; i < n; i++)
                {
                    alpha[i] = 1;
                }

                if (n < inputs.Length)
                {
                    alpha[n] = nu * l - n;
                }

                for (int i = n + 1; i < l; i++)
                {
                    alpha[i] = 0;
                }

                Func<int, int[], int, double[], double[]> Q = (int i, int[] indices, int length, double[] row) =>
                {
                    for (int j = 0; j < length; j++)
                    {
                        row[j] = Kernel.Function(x[i], x[indices[j]]);
                    }
                    return row;
                };

                var s = new FanChenLinQuadraticOptimization(alpha.Length, Q, zeros, ones)
                {
                    Tolerance = eps,
                    Shrinking = this.shrinking,
                    Solution  = alpha,
                    Token     = Token
                };

                bool success = s.Minimize();

                int sv = 0;
                for (int i = 0; i < alpha.Length; i++)
                {
                    if (alpha[i] > 0)
                    {
                        sv++;
                    }
                }

                Model.SupportVectors = new TInput[sv];
                Model.Weights        = new double[sv];

                for (int i = 0, j = 0; i < alpha.Length; i++)
                {
                    if (alpha[i] > 0)
                    {
                        Model.SupportVectors[j] = inputs[i];
                        Model.Weights[j]        = alpha[i];
                        j++;
                    }
                }

                Model.Threshold = s.Rho;

                if (success == false)
                {
                    throw new ConvergenceException("Convergence could not be attained. " +
                                                   "Please reduce the cost of misclassification errors by reducing " +
                                                   "the complexity parameter C or try a different kernel function.");
                }
            }
            finally
            {
                if (machine != null)
                {
                    // TODO: This block is only necessary to offer compatibility
                    // to code written using previous versions of the framework,
                    // and should be removed after a few releases.
                    machine.SupportVectors  = Model.SupportVectors;
                    machine.Weights         = Model.Weights;
                    machine.Threshold       = Model.Threshold;
                    machine.Kernel          = Model.Kernel;
                    machine.IsProbabilistic = Model.IsProbabilistic;
                }
            }

            return Model;
        }
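
The unsupervised overload above (no label vector) is the ν-SVM formulation used for novelty detection: roughly nu * l of the alphas start at their upper bound and the Fan-Chen-Lin solver refines them. A minimal usage sketch, assuming Accord.NET's OneclassSupportVectorLearning<Gaussian> teacher; the kernel width, Nu value and data are illustrative only.

        using Accord.MachineLearning.VectorMachines;
        using Accord.MachineLearning.VectorMachines.Learning;
        using Accord.Statistics.Kernels;

        // Observations assumed to come mostly from the "normal" class.
        double[][] inputs =
        {
            new double[] { 1.0, 1.1 },
            new double[] { 1.2, 0.9 },
            new double[] { 0.9, 1.0 },
            new double[] { 1.1, 1.2 },
        };

        var teacher = new OneclassSupportVectorLearning<Gaussian>()
        {
            Kernel = new Gaussian(0.5),  // kernel set explicitly, so no estimation is needed
            Nu = 0.1                     // upper bound on the expected fraction of outliers
        };

        SupportVectorMachine<Gaussian> svm = teacher.Learn(inputs);
        double[] scores = svm.Score(inputs);  // decision values; the sign separates typical points from outliers
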
        //protected virtual TModel Create(int inputs, TKernel kernel)
        //{
        //    return SupportVectorLearningHelper.Create<TModel, TInput, TKernel>(inputs, kernel);
        //}

        /// <summary>
        /// Learns a model that can map the given inputs to the given outputs.
        /// </summary>
        /// <param name="x">The model inputs.</param>
        /// <param name="y">The desired outputs associated with each <paramref name="x">inputs</paramref>.</param>
        /// <param name="weights">The weight of importance for each input-output pair (if supported by the learning algorithm).</param>
        /// <returns>
        /// A model that has learned how to produce <paramref name="y" /> given <paramref name="x" />.
        /// </returns>
        public TModel Learn(TInput[] x, double[] y, double[] weights = null)
        {
            Accord.MachineLearning.Tools.CheckArgs(x, y, weights, () =>
            {
                bool initialized = false;

                if (kernel == null)
                {
                    kernel      = SupportVectorLearningHelper.CreateKernel<TKernel, TInput>(x);
                    initialized = true;
                }

                if (!initialized)
                {
                    if (useKernelEstimation)
                    {
                        kernel = SupportVectorLearningHelper.EstimateKernel(kernel, x);
                    }
                    else
                    {
                        if (!hasKernelBeenSet)
                        {
                            Trace.TraceWarning("The Kernel property has not been set and the UseKernelEstimation property is set to false. Please" +
                                               " make sure that the default parameters of the kernel are suitable for your application, otherwise the learning" +
                                               " will result in a model with very poor performance.");
                        }
                    }
                }

                if (Model == null)
                {
                    Model = Create(SupportVectorLearningHelper.GetNumberOfInputs(kernel, x), kernel);
                }

                Model.Kernel = kernel;
                return Model;
            });

            // Learning data
            this.inputs  = x;
            this.outputs = y;

            try
            {
                // Initialization heuristics
                if (useComplexityHeuristic)
                {
                    complexity = kernel.EstimateComplexity(inputs);
                }

                C = new double[inputs.Length];
                for (int i = 0; i < outputs.Length; i++)
                {
                    C[i] = complexity;
                }

                if (sampleWeights != null)
                {
                    for (int i = 0; i < C.Length; i++)
                    {
                        C[i] *= sampleWeights[i];
                    }
                }


                InnerRun();

                SupportVectorLearningHelper.CheckOutput(Model);

                return Model;
            }
            finally
            {
                if (machine != null)
                {
                    // TODO: This block is only necessary to offer compatibility
                    // to code written using previous versions of the framework,
                    // and should be removed after a few releases.
                    machine.SupportVectors  = Model.SupportVectors;
                    machine.Weights         = Model.Weights;
                    machine.Threshold       = Model.Threshold;
                    machine.Kernel          = Model.Kernel;
                    machine.IsProbabilistic = Model.IsProbabilistic;
                }
            }
        }
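
This real-valued overload is the regression path: every sample receives the same base complexity C, optionally scaled by a per-sample weight, before InnerRun() solves the dual problem. A short usage sketch, assuming Accord.NET's SequentialMinimalOptimizationRegression<Gaussian> teacher; data and settings are illustrative only.

        using Accord.MachineLearning.VectorMachines;
        using Accord.MachineLearning.VectorMachines.Learning;
        using Accord.Statistics.Kernels;

        // Illustrative one-dimensional regression data (y = 2x).
        double[][] inputs  = { new[] { 0.0 }, new[] { 1.0 }, new[] { 2.0 }, new[] { 3.0 } };
        double[]   outputs = { 0.0, 2.0, 4.0, 6.0 };

        var teacher = new SequentialMinimalOptimizationRegression<Gaussian>()
        {
            UseKernelEstimation    = true,  // estimate kernel parameters from x
            UseComplexityHeuristic = true   // estimate the base complexity C from x
        };

        SupportVectorMachine<Gaussian> svm = teacher.Learn(inputs, outputs);
        double[] predicted = svm.Score(inputs);  // regression outputs
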
        //protected virtual TModel Create(int inputs, TKernel kernel)
        //{
        //    return SupportVectorLearningHelper.Create<TModel, TInput, TKernel>(inputs, kernel);
        //}

        /// <summary>
        /// Learns a model that can map the given inputs to the given outputs.
        /// </summary>
        /// <param name="x">The model inputs.</param>
        /// <param name="y">The desired outputs associated with each <paramref name="x">inputs</paramref>.</param>
        /// <param name="weights">The weight of importance for each input-output pair.</param>
        /// <returns>
        /// A model that has learned how to produce <paramref name="y" /> given <paramref name="x" />.
        /// </returns>
        public TModel Learn(TInput[] x, double[] y, double[] weights = null)
        {
            bool initialized = false;

            SupportVectorLearningHelper.CheckArgs(x, y);

            if (kernel == null)
            {
                kernel      = SupportVectorLearningHelper.CreateKernel<TKernel, TInput>(x);
                initialized = true;
            }

            if (!initialized && useKernelEstimation)
            {
                kernel = SupportVectorLearningHelper.EstimateKernel(kernel, x);
            }

            if (Model == null)
            {
                Model = Create(SupportVectorLearningHelper.GetNumberOfInputs(kernel, x), kernel);
            }

            Model.Kernel = kernel;

            // Initial argument checking
            SupportVectorLearningHelper.CheckArgs(Model, x, y);

            // Learning data
            this.inputs  = x;
            this.outputs = y;

            try
            {
                // Initialization heuristics
                if (useComplexityHeuristic)
                {
                    complexity = kernel.EstimateComplexity(inputs);
                }

                C = new double[inputs.Length];
                for (int i = 0; i < outputs.Length; i++)
                {
                    C[i] = complexity;
                }

                if (sampleWeights != null)
                {
                    for (int i = 0; i < C.Length; i++)
                    {
                        C[i] *= sampleWeights[i];
                    }
                }


                InnerRun();

                SupportVectorLearningHelper.CheckOutput(Model);

                return Model;
            }
            finally
            {
                if (machine != null)
                {
                    // TODO: This block is only necessary to offer compatibility
                    // to code written using previous versions of the framework,
                    // and should be removed after a few releases.
                    machine.SupportVectors  = Model.SupportVectors;
                    machine.Weights         = Model.Weights;
                    machine.Threshold       = Model.Threshold;
                    machine.Kernel          = Model.Kernel;
                    machine.IsProbabilistic = Model.IsProbabilistic;
                }
            }
        }
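
This older variant of the regression overload does the same work with explicit argument checks; in both versions the per-sample complexity C[i] is scaled by the corresponding entry of sampleWeights. Below is a brief sketch of supplying weights through the optional third argument of the Learn signature shown above (how the learner maps that argument onto sampleWeights is not shown in this excerpt), continuing the hypothetical regression setup from the previous sketch.

        // The last two samples are treated as twice as important as the first two.
        double[] weights = { 1.0, 1.0, 2.0, 2.0 };

        SupportVectorMachine<Gaussian> weighted = teacher.Learn(inputs, outputs, weights);
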
Example #5
        //protected virtual TModel Create(int inputs, TKernel kernel)
        //{
        //    return SupportVectorLearningHelper.Create<TModel, TInput, TKernel>(inputs, kernel);
        //}


        /// <summary>
        /// Learns a model that can map the given inputs to the given outputs.
        /// </summary>
        /// <param name="x">The model inputs.</param>
        /// <param name="y">The desired outputs associated with each <paramref name="x">inputs</paramref>.</param>
        /// <param name="weights">The weight of importance for each input-output pair (if supported by the learning algorithm).</param>
        /// <returns>
        /// A model that has learned how to produce <paramref name="y" /> given <paramref name="x" />.
        /// </returns>
        public override TModel Learn(TInput[] x, bool[] y, double[] weights = null)
        {
            Accord.MachineLearning.Tools.CheckArgs(x, y, weights, () =>
            {
                bool initialized = false;

                if (kernel == null)
                {
                    kernel      = SupportVectorLearningHelper.CreateKernel<TKernel, TInput>(x);
                    initialized = true;
                }

                if (!initialized)
                {
                    if (useKernelEstimation)
                    {
                        kernel = SupportVectorLearningHelper.EstimateKernel(kernel, x);
                    }
                    else
                    {
                        if (!hasKernelBeenSet)
                        {
                            Trace.TraceWarning("The Kernel property has not been set and the UseKernelEstimation property is set to false. Please" +
                                               " make sure that the default parameters of the kernel are suitable for your application, otherwise the learning" +
                                               " will result in a model with very poor performance.");
                        }
                    }
                }

                if (Model == null)
                {
                    Model = Create(SupportVectorLearningHelper.GetNumberOfInputs(kernel, x), kernel);
                }

                Model.Kernel = kernel;
                return(Model);
            }, onlyBinary: true);


            // Count class prevalence
            int positives, negatives;

            Classes.GetRatio(y, out positives, out negatives);

            // If all examples belong to a single class, terminate
            //   learning early by directly setting the threshold.

            try
            {
                if (positives == 0 || negatives == 0)
                {
                    Model.SupportVectors = new TInput[0];
                    Model.Weights        = new double[0];
                    Model.Threshold      = (positives == 0) ? -1 : +1;
                    return Model;
                }

                // Initialization heuristics
                if (useComplexityHeuristic)
                {
                    complexity = kernel.EstimateComplexity(x);
                }

                if (useClassLabelProportion)
                {
                    WeightRatio = positives / (double)negatives;
                }

                // Create per sample complexity
                Cpositive = complexity * positiveWeight;
                Cnegative = complexity * negativeWeight;

                Inputs = x;

                C = new double[y.Length];
                for (int i = 0; i < y.Length; i++)
                {
                    C[i] = y[i] ? Cpositive : Cnegative;
                }

                Outputs = new int[y.Length];
                for (int i = 0; i < y.Length; i++)
                {
                    Outputs[i] = y[i] ? 1 : -1;
                }

                if (weights != null)
                {
                    for (int i = 0; i < C.Length; i++)
                    {
                        C[i] *= weights[i];
                    }
                }


                InnerRun();

                SupportVectorLearningHelper.CheckOutput(Model);

                return Model;
            }
            finally
            {
                if (machine != null)
                {
                    // TODO: This block is only necessary to offer compatibility
                    // to code written using previous versions of the framework,
                    // and should be removed after a few releases.
                    machine.SupportVectors  = Model.SupportVectors;
                    machine.Weights         = Model.Weights;
                    machine.Threshold       = Model.Threshold;
                    machine.Kernel          = Model.Kernel;
                    machine.IsProbabilistic = Model.IsProbabilistic;
                }
            }
        }
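
The Cpositive/Cnegative split above is how class imbalance is handled: each class receives its own per-sample complexity (Cpositive = complexity * positiveWeight, Cnegative = complexity * negativeWeight), and WeightRatio can derive those weights from the observed label proportions. A minimal sketch of setting the class weights on a teacher, assuming Accord.NET's SequentialMinimalOptimization<Gaussian> and its PositiveWeight/NegativeWeight properties; the values are illustrative only.

        using Accord.MachineLearning.VectorMachines.Learning;
        using Accord.Statistics.Kernels;

        var teacher = new SequentialMinimalOptimization<Gaussian>()
        {
            UseComplexityHeuristic = true,  // base complexity C is estimated from the data
            PositiveWeight = 1.0,           // Cpositive = C * 1.0
            NegativeWeight = 0.1            // Cnegative = C * 0.1, so errors on negatives cost less
        };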