// ---- Example 1 (scraped snippet separator; score: 0) ----
        /// <summary>
        /// Learns a model that can map the given inputs to the given outputs.
        /// </summary>
        /// <param name="x">The model inputs.</param>
        /// <param name="y">The desired outputs associated with each <paramref name="x">inputs</paramref>.</param>
        /// <param name="weights">The weight of importance for each input-output pair.
        ///   Not supported by this learner; passing a non-null value throws.</param>
        /// <returns>
        /// A model that has learned how to produce <paramref name="y" /> given <paramref name="x" />.
        /// </returns>
        /// <exception cref="ArgumentException">Thrown when <paramref name="weights"/> is non-null,
        ///   since this stochastic gradient learner cannot honor per-sample weights.</exception>
        public override TModel Learn(TInput[] x, bool[] y, double[] weights = null)
        {
            // Fix: 'weights' was previously ignored silently. Reject it explicitly,
            // matching the contract of the averaged stochastic gradient variant.
            if (weights != null)
            {
                throw new ArgumentException(Accord.Properties.Resources.NotSupportedWeights, "weights");
            }

            if (Model == null)
            {
                Model = Create(SupportVectorLearningHelper.GetNumberOfInputs(Kernel, x), Kernel);
            }

            // Lazily allocate the linear weight vector once the input dimension is known.
            if (w == null)
            {
                w = new double[Model.NumberOfInputs];
            }

            // Determine the initial learning rate from the data when not set explicitly.
            if (eta0 == 0)
            {
                determineEta0(x, y);
            }

            // Plain SGD epochs; the learning rate decays as eta0 / (1 + lambda*eta0*t).
            do
            {
                for (int i = 0; i < x.Length; i++)
                {
                    trainOne(x[i], y[i], eta0 / (1 + lambda * eta0 * t));
                    t++;
                }

                convergence.NewValue = evaluateLoss(x, y);
            } while (!convergence.HasConverged);

            // Expose the learned hyperplane as a single (pseudo) support vector.
            Model.Weights        = new double[] { 1.0 };
            Model.SupportVectors = new[] { kernel.CreateVector(w) };
            Model.Threshold      = wBias;

            return Model;
        }
// ---- Example 2 (scraped snippet separator; score: 0) ----
        /// <summary>
        ///   Initializes a new instance of the <see cref="BaseSupportVectorLearning"/> class.
        /// </summary>
        ///
        /// <param name="machine">The machine to be learned.</param>
        /// <param name="inputs">The input data.</param>
        /// <param name="outputs">The corresponding output data.</param>
        ///
        protected BaseSupportVectorLearning(SupportVectorMachine machine, double[][] inputs, int[] outputs)
        {
            // Validate the machine/data combination up front.
            SupportVectorLearningHelper.CheckArgs(machine, inputs, outputs);

            this.machine = machine;

            // Resolve the kernel: a plain machine is implicitly linear; a kernel
            // machine is linear only when its kernel happens to be Linear.
            var ksvm = machine as KernelSupportVectorMachine;
            if (ksvm != null)
            {
                isLinear = ksvm.Kernel is Linear;
                kernel   = ksvm.Kernel;
            }
            else
            {
                isLinear = true;
                kernel   = new Linear(0);
            }

            // Keep references to the training data.
            this.inputs  = inputs;
            this.outputs = outputs;
        }
        //protected virtual TModel Create(int inputs, TKernel kernel)
        //{
        //    return SupportVectorLearningHelper.Create<TModel, TInput, TKernel>(inputs, kernel);
        //}

        /// <summary>
        /// Learns a model that can map the given inputs to the given outputs.
        /// </summary>
        /// <param name="x">The model inputs.</param>
        /// <param name="y">The desired outputs associated with each <paramref name="x">inputs</paramref>.</param>
        /// <param name="weights">The weight of importance for each input-output pair;
        ///   when given, it scales the per-sample cost values.</param>
        /// <returns>
        /// A model that has learned how to produce <paramref name="y" /> given <paramref name="x" />.
        /// </returns>
        public TModel Learn(TInput[] x, double[] y, double[] weights = null)
        {
            if (Model == null)
            {
                Model = Create(SupportVectorLearningHelper.GetNumberOfInputs(x), kernel);
            }

            Model.Kernel = kernel;

            // Initial argument checking
            SupportVectorLearningHelper.CheckArgs(Model, x, y);

            // Learning data
            this.inputs  = x;
            this.outputs = y;

            // Initialization heuristics: estimate C from the kernel when requested.
            if (useComplexityHeuristic)
            {
                complexity = kernel.EstimateComplexity(inputs);
            }

            // Per-sample complexity (cost) values.
            C = new double[inputs.Length];
            for (int i = 0; i < outputs.Length; i++)
            {
                C[i] = complexity;
            }

            if (sampleWeights != null)
            {
                for (int i = 0; i < C.Length; i++)
                {
                    C[i] *= sampleWeights[i];
                }
            }

            // Fix: the 'weights' argument was previously ignored. Apply it to the
            // per-sample costs, mirroring sibling learners in this framework.
            if (weights != null)
            {
                for (int i = 0; i < C.Length; i++)
                {
                    C[i] *= weights[i];
                }
            }

            try
            {
                InnerRun();
            }
            finally
            {
                if (machine != null)
                {
                    // TODO: This block is only necessary to offer compatibility
                    // to code written using previous versions of the framework,
                    // and should be removed after a few releases.
                    machine.SupportVectors  = Model.SupportVectors;
                    machine.Weights         = Model.Weights;
                    machine.Threshold       = Model.Threshold;
                    machine.Kernel          = Model.Kernel;
                    machine.IsProbabilistic = Model.IsProbabilistic;
                }
            }

            return Model;
        }
        /// <summary>
        ///   Initializes a new instance of a Sequential Minimal Optimization (SMO) algorithm.
        /// </summary>
        ///
        /// <param name="machine">A Support Vector Machine.</param>
        /// <param name="inputs">The input data points as row vectors.</param>
        /// <param name="outputs">The output label for each input point. Values must be either -1 or +1.</param>
        ///
        public SequentialMinimalOptimization(SupportVectorMachine machine,
                                             double[][] inputs, int[] outputs)
        {
            // Validate the machine/data combination before touching any state.
            SupportVectorLearningHelper.CheckArgs(machine, inputs, outputs);

            this.machine = machine;

            // Resolve the kernel. A plain machine is implicitly linear; a kernel
            // machine is linear only when its kernel happens to be Linear.
            var ksvm = machine as KernelSupportVectorMachine;
            if (ksvm != null)
            {
                isLinear = ksvm.Kernel is Linear;
                kernel   = ksvm.Kernel;
            }
            else
            {
                isLinear = true;
                kernel   = new Linear();
            }

            // Keep references to the training data.
            this.inputs  = inputs;
            this.outputs = outputs;

            int samples   = inputs.Length;
            int dimension = inputs[0].Length;

            // One Lagrange multiplier per training sample.
            this.alpha = new double[samples];

            // In the linear case the hyperplane can be stored explicitly.
            if (isLinear)
            {
                this.weights = new double[dimension];
            }

            // Cached prediction errors, one slot per sample.
            this.errors = new double[samples];

            // Kernel cache sized to the number of samples.
            this.cacheSize = samples;

            // Working index sets used by the SMO selection heuristics.
            this.activeExamples   = new HashSet<int>();
            this.nonBoundExamples = new HashSet<int>();
            this.atBoundsExamples = new HashSet<int>();
        }
// ---- Example 5 (scraped snippet separator; score: 0) ----
        /// <summary>
        /// Learns a model that can map the given inputs to the given outputs.
        /// </summary>
        /// <param name="x">The model inputs.</param>
        /// <param name="y">The desired outputs associated with each <paramref name="x">inputs</paramref>.</param>
        /// <param name="weights">The weight of importance for each input-output pair (if supported by the learning algorithm).</param>
        /// <returns>
        /// A model that has learned how to produce <paramref name="y" /> given <paramref name="x" />.
        /// </returns>
        public TModel Learn(TInput[] x, bool[] y, double[] weights = null)
        {
            SupportVectorLearningHelper.CheckArgs(x, y);

            // Lazily create the machine once the input dimensionality is known.
            if (machine == null)
            {
                int dimensions = SupportVectorLearningHelper.GetNumberOfInputs(kernel, x);
                this.machine = Create(dimensions, Kernel);
            }

            InnerRun();

            return machine;
        }
        /// <summary>
        /// Learns a model that can map the given inputs to the given outputs.
        /// </summary>
        /// <param name="x">The model inputs.</param>
        /// <param name="y">The desired outputs associated with each <paramref name="x">inputs</paramref>.</param>
        /// <param name="weights">The weight of importance for each input-output pair (if supported by the learning algorithm).</param>
        /// <returns>
        /// A model that has learned how to produce <paramref name="y" /> given <paramref name="x" />.
        /// </returns>
        public TModel Learn(TInput[] x, bool[] y, double[] weights = null)
        {
            // Validate the (binary) problem; the factory callback lazily creates
            // the model once the input dimensionality can be inferred from the data.
            Accord.MachineLearning.Tools.CheckArgs(x, y, weights,
                () =>
                {
                    if (Model == null)
                    {
                        int numberOfInputs = SupportVectorLearningHelper.GetNumberOfInputs(kernel, x);
                        Model = Create(numberOfInputs, Kernel);
                    }

                    return Model;
                },
                onlyBinary: true);

            InnerRun();

            return machine;
        }
// ---- Example 7 (scraped snippet separator; score: 0) ----
        /// <summary>
        ///   Constructs a new Least Squares SVM (LS-SVM) learning algorithm.
        /// </summary>
        ///
        /// <param name="machine">A support vector machine.</param>
        /// <param name="inputs">The input data points as row vectors.</param>
        /// <param name="outputs">The output label for each input point. Values must be either -1 or +1.</param>
        ///
        public LeastSquaresLearning(SupportVectorMachine machine, double[][] inputs, int[] outputs)
        {
            SupportVectorLearningHelper.CheckArgs(machine, inputs, outputs);

            this.machine = machine;

            // Use the machine's own kernel when available; otherwise default to Linear.
            var ksvm = machine as KernelSupportVectorMachine;
            this.kernel = (ksvm != null) ? ksvm.Kernel : new Linear();

            // Size the kernel cache to the number of training samples.
            this.cacheSize = inputs.Length;

            // Keep references to the training data.
            this.inputs  = inputs;
            this.outputs = outputs;

            // Constant vector of ones used by the LS-SVM linear system.
            this.ones = Matrix.Vector(outputs.Length, 1);
        }
// ---- Example 8 (scraped snippet separator; score: 0) ----
        //protected virtual TModel Create(int inputs, TKernel kernel)
        //{
        //    return SupportVectorLearningHelper.Create<TModel, TInput, TKernel>(inputs, kernel);
        //}

        /// <summary>
        /// Learns a model that can map the given inputs to the given outputs.
        /// </summary>
        /// <param name="x">The model inputs.</param>
        /// <param name="y">The desired outputs associated with each <paramref name="x">inputs</paramref>.</param>
        /// <param name="weights">The weight of importance for each input-output pair;
        ///   when given, it scales the per-sample cost values.</param>
        /// <returns>
        /// A model that has learned how to produce <paramref name="y" /> given <paramref name="x" />.
        /// </returns>
        public TModel Learn(TInput[] x, double[] y, double[] weights)
        {
            if (machine == null)
            {
                int numberOfInputs = SupportVectorLearningHelper.GetNumberOfInputs(x);
                this.machine = Create(numberOfInputs, Kernel);
            }

            // Fix: validate the data actually being learned (x, y) instead of the
            // stale 'inputs'/'outputs' fields, which are only assigned below (and
            // may still be null on the first call through this code path).
            SupportVectorLearningHelper.CheckArgs(machine, x, y);

            // Learning data
            this.inputs  = x;
            this.outputs = y;

            // Initialization heuristics: estimate C from the kernel when requested.
            if (useComplexityHeuristic)
            {
                complexity = kernel.EstimateComplexity(inputs);
            }

            // Per-sample complexity (cost) values.
            C = new double[inputs.Length];
            for (int i = 0; i < outputs.Length; i++)
            {
                C[i] = complexity;
            }

            if (sampleWeights != null)
            {
                for (int i = 0; i < C.Length; i++)
                {
                    C[i] *= sampleWeights[i];
                }
            }

            // Fix: the 'weights' argument was previously ignored. Apply it to the
            // per-sample costs, mirroring sibling learners in this framework.
            if (weights != null)
            {
                for (int i = 0; i < C.Length; i++)
                {
                    C[i] *= weights[i];
                }
            }

            InnerRun();

            // Compute error if required.
            return (TModel)machine;
        }
// ---- Example 9 (scraped snippet separator; score: 0) ----
        //protected virtual TModel Create(int inputs, TKernel kernel)
        //{
        //    return SupportVectorLearningHelper.Create<TModel, TInput, TKernel>(inputs, kernel);
        //}


        /// <summary>
        /// Learns a model that can map the given inputs to the given outputs.
        /// </summary>
        /// <param name="x">The model inputs.</param>
        /// <param name="y">The desired outputs associated with each <paramref name="x">inputs</paramref>.</param>
        /// <param name="weights">The weight of importance for each input-output pair.</param>
        /// <returns>
        /// A model that has learned how to produce <paramref name="y" /> given <paramref name="x" />.
        /// </returns>
        public override TModel Learn(TInput[] x, bool[] y, double[] weights = null)
        {
            SupportVectorLearningHelper.CheckArgs(x, y);

            if (kernel == null)
            {
                // No kernel given: create one with parameters guessed from the data.
                kernel = SupportVectorLearningHelper.CreateKernel<TKernel, TInput>(x);
            }
            else if (useKernelEstimation)
            {
                // A kernel was supplied; optionally refresh its parameters from the data.
                kernel = SupportVectorLearningHelper.EstimateKernel(kernel, x);
            }

            if (Model == null)
            {
                Model = Create(SupportVectorLearningHelper.GetNumberOfInputs(kernel, x), kernel);
            }

            Model.Kernel = kernel;

            // Validate the machine/data combination.
            SupportVectorLearningHelper.CheckArgs(Model, x, y);

            // Count how many examples belong to each class.
            int positives, negatives;
            Classes.GetRatio(y, out positives, out negatives);

            try
            {
                // Degenerate case: all labels identical — answer with a constant
                // decision function instead of running the optimizer.
                if (positives == 0 || negatives == 0)
                {
                    Model.SupportVectors = new TInput[0];
                    Model.Weights        = new double[0];
                    Model.Threshold      = (positives == 0) ? -1 : +1;
                    return Model;
                }

                // Estimate the complexity parameter from the data when requested.
                if (useComplexityHeuristic)
                {
                    complexity = kernel.EstimateComplexity(x);
                }

                // Optionally balance the classes by their prevalence ratio.
                if (useClassLabelProportion)
                {
                    WeightRatio = positives / (double)negatives;
                }

                // Class-dependent misclassification costs.
                Cpositive = complexity * positiveWeight;
                Cnegative = complexity * negativeWeight;

                Inputs = x;

                // Per-sample costs and {-1,+1} labels, filled in a single pass.
                C       = new double[y.Length];
                Outputs = new int[y.Length];
                for (int i = 0; i < y.Length; i++)
                {
                    if (y[i])
                    {
                        C[i]       = Cpositive;
                        Outputs[i] = +1;
                    }
                    else
                    {
                        C[i]       = Cnegative;
                        Outputs[i] = -1;
                    }
                }

                // Scale costs by the caller-supplied importance weights, if any.
                if (weights != null)
                {
                    for (int i = 0; i < C.Length; i++)
                    {
                        C[i] *= weights[i];
                    }
                }

                InnerRun();

                SupportVectorLearningHelper.CheckOutput(Model);

                return Model;
            }
            finally
            {
                if (machine != null)
                {
                    // TODO: This block is only necessary to offer compatibility
                    // to code written using previous versions of the framework,
                    // and should be removed after a few releases.
                    machine.SupportVectors  = Model.SupportVectors;
                    machine.Weights         = Model.Weights;
                    machine.Threshold       = Model.Threshold;
                    machine.Kernel          = Model.Kernel;
                    machine.IsProbabilistic = Model.IsProbabilistic;
                }
            }
        }
        /// <summary>
        /// Learns a model that can map the given inputs to the desired outputs.
        /// </summary>
        /// <param name="x">The model inputs.</param>
        /// <param name="weights">The weight of importance for each input sample.</param>
        /// <returns>
        /// A model that has learned how to produce suitable outputs
        /// given the input data <paramref name="x" />.
        /// </returns>
        public TModel Learn(TInput[] x, double[] weights = null)
        {
            bool initialized = false;

            SupportVectorLearningHelper.CheckArgs(x);

            // If no kernel was supplied, create one with parameters guessed from the data.
            if (kernel == null)
            {
                kernel      = SupportVectorLearningHelper.CreateKernel <TKernel, TInput>(x);
                initialized = true;
            }

            // Otherwise, optionally re-estimate the existing kernel's parameters.
            if (!initialized && useKernelEstimation)
            {
                kernel = SupportVectorLearningHelper.EstimateKernel(kernel, x);
            }

            if (Model == null)
            {
                Model = Create(SupportVectorLearningHelper.GetNumberOfInputs(kernel, x), kernel);
            }

            Model.Kernel = kernel;

            // NOTE(review): the 'weights' parameter is never used below — confirm
            // whether per-sample weights should scale the problem or be rejected.
            try
            {
                this.inputs = x;
                double[] zeros = new double[inputs.Length];
                int[]    ones  = Vector.Ones <int>(inputs.Length);
                this.alpha = Vector.Ones <double>(inputs.Length);

                int l = inputs.Length;
                int n = (int)(nu * l);  // # of alpha's at upper bound

                // Initialize the multipliers: the first n at the upper bound (1),
                // one fractional value so the alphas sum to nu*l, the rest at zero.
                for (int i = 0; i < n; i++)
                {
                    alpha[i] = 1;
                }

                if (n < inputs.Length)
                {
                    alpha[n] = nu * l - n;
                }

                for (int i = n + 1; i < l; i++)
                {
                    alpha[i] = 0;
                }

                // Kernel-matrix row provider for the quadratic optimizer: fills
                // 'row' with K(x[i], x[indices[j]]) for the requested columns.
                Func <int, int[], int, double[], double[]> Q = (int i, int[] indices, int length, double[] row) =>
                {
                    for (int j = 0; j < length; j++)
                    {
                        row[j] = Kernel.Function(x[i], x[indices[j]]);
                    }
                    return(row);
                };

                // Solve the dual problem with the Fan-Chen-Lin (LIBSVM-style) solver.
                var s = new FanChenLinQuadraticOptimization(alpha.Length, Q, zeros, ones)
                {
                    Tolerance = eps,
                    Shrinking = this.shrinking,
                    Solution  = alpha,
                    Token     = Token
                };

                bool success = s.Minimize();

                // Count the samples that ended up with non-zero multipliers.
                int sv = 0;
                for (int i = 0; i < alpha.Length; i++)
                {
                    if (alpha[i] > 0)
                    {
                        sv++;
                    }
                }

                // Keep only the support vectors (alpha > 0) and their weights.
                Model.SupportVectors = new TInput[sv];
                Model.Weights        = new double[sv];

                for (int i = 0, j = 0; i < alpha.Length; i++)
                {
                    if (alpha[i] > 0)
                    {
                        Model.SupportVectors[j] = inputs[i];
                        Model.Weights[j]        = alpha[i];
                        j++;
                    }
                }

                Model.Threshold = s.Rho;

                // Note: the model fields above are populated even on failure (and
                // copied to 'machine' in the finally block); the exception still
                // surfaces to the caller.
                if (success == false)
                {
                    throw new ConvergenceException("Convergence could not be attained. " +
                                                   "Please reduce the cost of misclassification errors by reducing " +
                                                   "the complexity parameter C or try a different kernel function.");
                }
            }
            finally
            {
                if (machine != null)
                {
                    // TODO: This block is only necessary to offer compatibility
                    // to code written using previous versions of the framework,
                    // and should be removed after a few releases.
                    machine.SupportVectors  = Model.SupportVectors;
                    machine.Weights         = Model.Weights;
                    machine.Threshold       = Model.Threshold;
                    machine.Kernel          = Model.Kernel;
                    machine.IsProbabilistic = Model.IsProbabilistic;
                }
            }

            return(Model);
        }
// ---- Example 11 (scraped snippet separator; score: 0) ----
        /// <summary>
        /// Learns a model that can map the given inputs to the given outputs.
        /// </summary>
        /// <param name="x">The model inputs.</param>
        /// <param name="y">The desired outputs associated with each <paramref name="x">inputs</paramref>.</param>
        /// <param name="weights">The weight of importance for each input-output pair (if supported by the learning algorithm).</param>
        /// <returns>
        /// A model that has learned how to produce <paramref name="y" /> given <paramref name="x" />.
        /// </returns>
        /// <exception cref="ArgumentException">Thrown when <paramref name="weights"/> is non-null,
        ///   since per-sample weights are not supported by this learner.</exception>
        public override TModel Learn(TInput[] x, bool[] y, double[] weights = null)
        {
            // Per-sample weights are not supported: fail fast instead of ignoring them.
            if (weights != null)
            {
                throw new ArgumentException(Accord.Properties.Resources.NotSupportedWeights, "weights");
            }

            if (Model == null)
            {
                Model = Create(SupportVectorLearningHelper.GetNumberOfInputs(Kernel, x), Kernel);
            }

            // Lazily allocate the current (w) and averaged (a) weight vectors.
            if (w == null)
            {
                w = new double[Model.NumberOfInputs];
                a = new double[Model.NumberOfInputs];
            }

            // Determine the initial learning rate from the data when not set explicitly.
            if (eta0 == 0)
            {
                determineEta0(x, y);
            }

            // Averaging kicks in only after 'tstart' iterations.
            if (this.tstart == 0)
            {
                this.tstart = Model.NumberOfInputs;
            }

            do
            {
                Trace.WriteLine("Epoch " + (convergence.CurrentIteration + 1));

                Trace.WriteLine(" - Learning");
                var t1 = Stopwatch.StartNew();

                // One pass over the data: 'eta' decays polynomially with t; the
                // averaging coefficient 'mu' is 1 until tstart, then decays.
                for (int i = 0; i < x.Length; i++)
                {
                    double eta = eta0 / Math.Pow(1 + lambda * eta0 * t, 0.75);
                    double mu  = (t <= tstart) ? 1.0 : mu0 / (1 + mu0 * (t - tstart));
                    trainOne(x[i], y[i], eta, mu);
                    t++;
                }

                t1.Stop();

                Trace.WriteLine(" -- wNorm: " + wnorm());
                Trace.WriteLine(" -- wBias: " + wBias);
                Trace.WriteLine(" -- aNorm: " + anorm());
                Trace.WriteLine(" -- aBias: " + aBias);
                Trace.WriteLine(" -- epoch done in " + t1.Elapsed);

                // The loss is only computed when a tolerance is set (it is expensive);
                // otherwise a zero value is fed to the convergence criterion.
                double loss = 0;
                if (convergence.Tolerance > 0)
                {
                    Trace.WriteLine("- Computing loss");
                    t1   = Stopwatch.StartNew();
                    loss = evaluateLoss(x, y, x.Length);
                    Trace.WriteLine(" -- loss done in " + t1.Elapsed);
                }

                // Check if it has converged
                convergence.NewValue = loss;
            } while (!convergence.HasConverged);

            // Expose the learned hyperplane as a single (pseudo) support vector.
            // NOTE(review): the *current* weights 'w' are exported here, not the
            // averaged weights 'a' — confirm this is intended for this variant.
            Model.Weights        = new double[] { 1.0 };
            Model.SupportVectors = new[] { kernel.CreateVector(w) };
            Model.Threshold      = wBias;

            return(Model);
        }
        //protected virtual TModel Create(int inputs, TKernel kernel)
        //{
        //    return SupportVectorLearningHelper.Create<TModel, TInput, TKernel>(inputs, kernel);
        //}

        /// <summary>
        /// Learns a model that can map the given inputs to the given outputs.
        /// </summary>
        /// <param name="x">The model inputs.</param>
        /// <param name="y">The desired outputs associated with each <paramref name="x">inputs</paramref>.</param>
        /// <param name="weights">The weight of importance for each input-output pair (if supported by the learning algorithm).</param>
        /// <returns>
        /// A model that has learned how to produce <paramref name="y" /> given <paramref name="x" />.
        /// </returns>
        public TModel Learn(TInput[] x, double[] y, double[] weights = null)
        {
            // Validate arguments; the callback lazily resolves the kernel and model.
            Accord.MachineLearning.Tools.CheckArgs(x, y, weights, () =>
            {
                if (kernel == null)
                {
                    // No kernel yet: create one with parameters guessed from the data.
                    kernel = SupportVectorLearningHelper.CreateKernel<TKernel, TInput>(x);
                }
                else if (useKernelEstimation)
                {
                    // Refresh the existing kernel's parameters from the data.
                    kernel = SupportVectorLearningHelper.EstimateKernel(kernel, x);
                }
                else if (!hasKernelBeenSet)
                {
                    Trace.TraceWarning("The Kernel property has not been set and the UseKernelEstimation property is set to false. Please" +
                                       " make sure that the default parameters of the kernel are suitable for your application, otherwise the learning" +
                                       " will result in a model with very poor performance.");
                }

                if (Model == null)
                {
                    Model = Create(SupportVectorLearningHelper.GetNumberOfInputs(kernel, x), kernel);
                }

                Model.Kernel = kernel;
                return Model;
            });

            // Keep references to the learning data.
            this.inputs  = x;
            this.outputs = y;

            try
            {
                // Estimate the complexity parameter from the kernel when requested.
                if (useComplexityHeuristic)
                {
                    complexity = kernel.EstimateComplexity(inputs);
                }

                // Per-sample complexity (cost) values.
                C = new double[inputs.Length];
                for (int i = 0; i < outputs.Length; i++)
                {
                    C[i] = complexity;
                }

                // Scale costs by per-sample weights, if any were configured.
                if (sampleWeights != null)
                {
                    for (int i = 0; i < C.Length; i++)
                    {
                        C[i] *= sampleWeights[i];
                    }
                }

                InnerRun();

                SupportVectorLearningHelper.CheckOutput(Model);

                return Model;
            }
            finally
            {
                if (machine != null)
                {
                    // TODO: This block is only necessary to offer compatibility
                    // to code written using previous versions of the framework,
                    // and should be removed after a few releases.
                    machine.SupportVectors  = Model.SupportVectors;
                    machine.Weights         = Model.Weights;
                    machine.Threshold       = Model.Threshold;
                    machine.Kernel          = Model.Kernel;
                    machine.IsProbabilistic = Model.IsProbabilistic;
                }
            }
        }
// ---- Example 13 (scraped snippet separator; score: 0) ----
        //protected virtual TModel Create(int inputs, TKernel kernel)
        //{
        //    return SupportVectorLearningHelper.Create<TModel, TInput, TKernel>(inputs, kernel);
        //}


        /// <summary>
        /// Learns a model that can map the given inputs to the given outputs.
        /// </summary>
        /// <param name="x">The model inputs.</param>
        /// <param name="y">The desired outputs associated with each <paramref name="x">inputs</paramref>.</param>
        /// <param name="weights">The weight of importance for each input-output pair (if supported by the learning algorithm).</param>
        /// <returns>
        /// A model that has learned how to produce <paramref name="y" /> given <paramref name="x" />.
        /// </returns>
        public override TModel Learn(TInput[] x, bool[] y, double[] weights = null)
        {
            // Validate the (binary) problem; the callback lazily resolves the
            // kernel and the model from the data.
            Accord.MachineLearning.Tools.CheckArgs(x, y, weights, () =>
            {
                if (kernel == null)
                {
                    // No kernel yet: create one with parameters guessed from the data.
                    kernel = SupportVectorLearningHelper.CreateKernel<TKernel, TInput>(x);
                }
                else if (useKernelEstimation)
                {
                    // Refresh the existing kernel's parameters from the data.
                    kernel = SupportVectorLearningHelper.EstimateKernel(kernel, x);
                }
                else if (!hasKernelBeenSet)
                {
                    Trace.TraceWarning("The Kernel property has not been set and the UseKernelEstimation property is set to false. Please" +
                                       " make sure that the default parameters of the kernel are suitable for your application, otherwise the learning" +
                                       " will result in a model with very poor performance.");
                }

                if (Model == null)
                {
                    Model = Create(SupportVectorLearningHelper.GetNumberOfInputs(kernel, x), kernel);
                }

                Model.Kernel = kernel;
                return Model;
            },
            onlyBinary: true);

            // Count how many examples belong to each class.
            int positives, negatives;
            Classes.GetRatio(y, out positives, out negatives);

            try
            {
                // Degenerate case: all labels identical — answer with a constant
                // decision function instead of running the optimizer.
                if (positives == 0 || negatives == 0)
                {
                    Model.SupportVectors = new TInput[0];
                    Model.Weights        = new double[0];
                    Model.Threshold      = (positives == 0) ? -1 : +1;
                    return Model;
                }

                // Estimate the complexity parameter from the data when requested.
                if (useComplexityHeuristic)
                {
                    complexity = kernel.EstimateComplexity(x);
                }

                // Optionally balance the classes by their prevalence ratio.
                if (useClassLabelProportion)
                {
                    WeightRatio = positives / (double)negatives;
                }

                // Class-dependent misclassification costs.
                Cpositive = complexity * positiveWeight;
                Cnegative = complexity * negativeWeight;

                Inputs = x;

                // Per-sample costs and {-1,+1} labels, filled in a single pass.
                C       = new double[y.Length];
                Outputs = new int[y.Length];
                for (int i = 0; i < y.Length; i++)
                {
                    if (y[i])
                    {
                        C[i]       = Cpositive;
                        Outputs[i] = +1;
                    }
                    else
                    {
                        C[i]       = Cnegative;
                        Outputs[i] = -1;
                    }
                }

                // Scale costs by the caller-supplied importance weights, if any.
                if (weights != null)
                {
                    for (int i = 0; i < C.Length; i++)
                    {
                        C[i] *= weights[i];
                    }
                }

                InnerRun();

                SupportVectorLearningHelper.CheckOutput(Model);

                return Model;
            }
            finally
            {
                if (machine != null)
                {
                    // TODO: This block is only necessary to offer compatibility
                    // to code written using previous versions of the framework,
                    // and should be removed after a few releases.
                    machine.SupportVectors  = Model.SupportVectors;
                    machine.Weights         = Model.Weights;
                    machine.Threshold       = Model.Threshold;
                    machine.Kernel          = Model.Kernel;
                    machine.IsProbabilistic = Model.IsProbabilistic;
                }
            }
        }