/// <summary>
///   Initializes a new instance of the <see cref="BaseSupportVectorLearning"/> class.
/// </summary>
///
/// <param name="machine">The machine to be learned.</param>
/// <param name="inputs">The input data.</param>
/// <param name="outputs">The corresponding output data.</param>
///
protected BaseSupportVectorLearning(SupportVectorMachine machine, double[][] inputs, int[] outputs)
{
    // Validate all arguments before touching any state.
    SupportVectorLearningHelper.CheckArgs(machine, inputs, outputs);

    this.machine = machine;

    // Resolve the kernel: a plain (non-kernel) machine is treated as linear.
    var kernelMachine = machine as KernelSupportVectorMachine;
    if (kernelMachine != null)
    {
        this.kernel = kernelMachine.Kernel;
        this.isLinear = kernelMachine.Kernel is Linear;
    }
    else
    {
        this.kernel = new Linear(0);
        this.isLinear = true;
    }

    // Keep references to the training data.
    this.inputs = inputs;
    this.outputs = outputs;
}
//protected virtual TModel Create(int inputs, TKernel kernel)
//{
//    return SupportVectorLearningHelper.Create<TModel, TInput, TKernel>(inputs, kernel);
//}

/// <summary>
///   Learns a model that can map the given inputs to the given outputs.
/// </summary>
/// <param name="x">The model inputs.</param>
/// <param name="y">The desired outputs associated with each <paramref name="x">inputs</paramref>.</param>
/// <param name="weights">The weight of importance for each input-output pair.</param>
/// <returns>
///   A model that has learned how to produce <paramref name="y" /> given <paramref name="x" />.
/// </returns>
public TModel Learn(TInput[] x, double[] y, double[] weights = null)
{
    // Lazily create the model on first use, sized to the dimensionality of x.
    if (Model == null)
    {
        Model = Create(SupportVectorLearningHelper.GetNumberOfInputs(x), kernel);
    }
    Model.Kernel = kernel;

    // Initial argument checking
    SupportVectorLearningHelper.CheckArgs(Model, x, y);

    // Learning data
    this.inputs = x;
    this.outputs = y;

    // Initialization heuristics: estimate C from the kernel when requested.
    if (useComplexityHeuristic)
    {
        complexity = kernel.EstimateComplexity(inputs);
    }

    // Build a per-sample cost vector, starting uniform at 'complexity'.
    // (CheckArgs above is expected to guarantee inputs/outputs have equal length.)
    C = new double[inputs.Length];
    for (int i = 0; i < outputs.Length; i++)
    {
        C[i] = complexity;
    }

    // NOTE(review): the 'weights' parameter is never read in this method;
    // per-sample weighting comes from the 'sampleWeights' field instead —
    // confirm whether 'weights' should populate 'sampleWeights' when non-null.
    if (sampleWeights != null)
    {
        for (int i = 0; i < C.Length; i++)
        {
            C[i] *= sampleWeights[i];
        }
    }

    try
    {
        // Run the actual learning algorithm (implemented by the subclass).
        InnerRun();
    }
    finally
    {
        if (machine != null)
        {
            // TODO: This block is only necessary to offer compatibility
            // to code written using previous versions of the framework,
            // and should be removed after a few releases.
            machine.SupportVectors = Model.SupportVectors;
            machine.Weights = Model.Weights;
            machine.Threshold = Model.Threshold;
            machine.Kernel = Model.Kernel;
            machine.IsProbabilistic = Model.IsProbabilistic;
        }
    }

    return (Model);
}
/// <summary>
///   Initializes a new instance of a Sequential Minimal Optimization (SMO) algorithm.
/// </summary>
///
/// <param name="machine">A Support Vector Machine.</param>
/// <param name="inputs">The input data points as row vectors.</param>
/// <param name="outputs">The output label for each input point. Values must be either -1 or +1.</param>
///
public SequentialMinimalOptimization(SupportVectorMachine machine, double[][] inputs, int[] outputs)
{
    // Validate all arguments before touching any state.
    SupportVectorLearningHelper.CheckArgs(machine, inputs, outputs);

    this.machine = machine;

    // Resolve the kernel: a plain (non-kernel) machine is treated as linear.
    var kernelMachine = machine as KernelSupportVectorMachine;
    if (kernelMachine != null)
    {
        this.kernel = kernelMachine.Kernel;
        this.isLinear = kernelMachine.Kernel is Linear;
    }
    else
    {
        this.kernel = new Linear();
        this.isLinear = true;
    }

    // Keep references to the training data.
    this.inputs = inputs;
    this.outputs = outputs;

    int sampleCount = inputs.Length;
    int dimension = inputs[0].Length;

    // Lagrange multipliers, one per training sample.
    this.alpha = new double[sampleCount];

    // For linear machines the hyperplane weights are kept explicitly.
    if (isLinear)
    {
        this.weights = new double[dimension];
    }

    // Error cache, one entry per sample.
    this.errors = new double[sampleCount];

    // Kernel cache sized to the number of samples.
    this.cacheSize = sampleCount;

    // Index sets used by the SMO working-set bookkeeping.
    activeExamples = new HashSet<int>();
    nonBoundExamples = new HashSet<int>();
    atBoundsExamples = new HashSet<int>();
}
/// <summary>
///   Learns a model that can map the given inputs to the given outputs.
/// </summary>
/// <param name="x">The model inputs.</param>
/// <param name="y">The desired outputs associated with each <paramref name="x">inputs</paramref>.</param>
/// <param name="weights">The weight of importance for each input-output pair (if supported by the learning algorithm).</param>
/// <returns>
///   A model that has learned how to produce <paramref name="y" /> given <paramref name="x" />.
/// </returns>
public TModel Learn(TInput[] x, bool[] y, double[] weights = null)
{
    SupportVectorLearningHelper.CheckArgs(x, y);

    // Lazily create the machine on first use, sized to the input dimensionality.
    if (this.machine == null)
    {
        int dimensions = SupportVectorLearningHelper.GetNumberOfInputs(kernel, x);
        this.machine = Create(dimensions, Kernel);
    }

    // Delegate the actual training to the subclass.
    InnerRun();

    return machine;
}
/// <summary>
///   Constructs a new Least Squares SVM (LS-SVM) learning algorithm.
/// </summary>
///
/// <param name="machine">A support vector machine.</param>
/// <param name="inputs">The input data points as row vectors.</param>
/// <param name="outputs">The output label for each input point. Values must be either -1 or +1.</param>
///
public LeastSquaresLearning(SupportVectorMachine machine, double[][] inputs, int[] outputs)
{
    // Validate all arguments before touching any state.
    SupportVectorLearningHelper.CheckArgs(machine, inputs, outputs);

    this.machine = machine;

    // Resolve the kernel; a plain machine falls back to a linear kernel.
    var kernelMachine = machine as KernelSupportVectorMachine;
    this.kernel = kernelMachine != null ? kernelMachine.Kernel : new Linear();

    // Kernel cache sized to the number of samples.
    this.cacheSize = inputs.Length;

    // Keep references to the training data.
    this.inputs = inputs;
    this.outputs = outputs;

    // Constant vector of ones used by the LS-SVM linear system.
    this.ones = Matrix.Vector(outputs.Length, 1);
}
//protected virtual TModel Create(int inputs, TKernel kernel)
//{
//    return SupportVectorLearningHelper.Create<TModel, TInput, TKernel>(inputs, kernel);
//}

/// <summary>
///   Learns a model that can map the given inputs to the given outputs.
/// </summary>
/// <param name="x">The model inputs.</param>
/// <param name="y">The desired outputs associated with each <paramref name="x">inputs</paramref>.</param>
/// <param name="weights">The weight of importance for each input-output pair.</param>
/// <returns>
///   A model that has learned how to produce <paramref name="y" /> given <paramref name="x" />.
/// </returns>
public TModel Learn(TInput[] x, double[] y, double[] weights)
{
    // Lazily create the machine on first use, sized to the dimensionality of x.
    if (machine == null)
    {
        int numberOfInputs = SupportVectorLearningHelper.GetNumberOfInputs(x);
        this.machine = Create(numberOfInputs, Kernel);
    }

    // BUGFIX: validate the data actually being learned (x, y). The previous
    // code called CheckArgs(machine, inputs, outputs) before assigning the
    // fields, so it validated the *stale* values — which are null on the very
    // first call — and also dereferenced 'machine' before it was created.
    // This mirrors the sibling Learn overload, which checks (Model, x, y)
    // after model creation.
    SupportVectorLearningHelper.CheckArgs(machine, x, y);

    // Learning data
    this.inputs = x;
    this.outputs = y;

    // Initialization heuristics: estimate C from the kernel when requested.
    if (useComplexityHeuristic)
    {
        complexity = kernel.EstimateComplexity(inputs);
    }

    // Build a per-sample cost vector, starting uniform at 'complexity'.
    C = new double[inputs.Length];
    for (int i = 0; i < outputs.Length; i++)
    {
        C[i] = complexity;
    }

    // NOTE(review): the 'weights' parameter is not used here; per-sample
    // weighting is taken from the 'sampleWeights' field instead — confirm
    // whether 'weights' should populate 'sampleWeights' when non-null.
    if (sampleWeights != null)
    {
        for (int i = 0; i < C.Length; i++)
        {
            C[i] *= sampleWeights[i];
        }
    }

    // Run the actual learning algorithm (implemented by the subclass).
    InnerRun();

    // Compute error if required.
    return (TModel)machine;
}
//protected virtual TModel Create(int inputs, TKernel kernel)
//{
//    return SupportVectorLearningHelper.Create<TModel, TInput, TKernel>(inputs, kernel);
//}

/// <summary>
///   Learns a model that can map the given inputs to the given outputs.
/// </summary>
/// <param name="x">The model inputs.</param>
/// <param name="y">The desired outputs associated with each <paramref name="x">inputs</paramref>.</param>
/// <param name="weights">The weight of importance for each input-output pair.</param>
/// <returns>
///   A model that has learned how to produce <paramref name="y" /> given <paramref name="x" />.
/// </returns>
public override TModel Learn(TInput[] x, bool[] y, double[] weights = null)
{
    bool initialized = false;

    SupportVectorLearningHelper.CheckArgs(x, y);

    // Create a default kernel from the data when none was supplied.
    if (kernel == null)
    {
        kernel = SupportVectorLearningHelper.CreateKernel<TKernel, TInput>(x);
        initialized = true;
    }

    // A freshly-created kernel is already fitted to the data; only
    // re-estimate parameters of a user-provided kernel when asked to.
    if (!initialized && useKernelEstimation)
    {
        kernel = SupportVectorLearningHelper.EstimateKernel(kernel, x);
    }

    // Lazily create the model on first use, sized to the input dimensionality.
    if (Model == null)
    {
        Model = Create(SupportVectorLearningHelper.GetNumberOfInputs(kernel, x), kernel);
    }
    Model.Kernel = kernel;

    // Initial argument checking
    SupportVectorLearningHelper.CheckArgs(Model, x, y);

    // Count class prevalence
    int positives, negatives;
    Classes.GetRatio(y, out positives, out negatives);

    // If all examples are positive or negative, terminate
    // learning early by directly setting the threshold.
    try
    {
        // Degenerate single-class problem: no support vectors are needed;
        // the threshold alone makes the machine always answer that class.
        if (positives == 0 || negatives == 0)
        {
            Model.SupportVectors = new TInput[0];
            Model.Weights = new double[0];
            Model.Threshold = (positives == 0) ? -1 : +1;
            return (Model);
        }

        // Initialization heuristics
        if (useComplexityHeuristic)
        {
            complexity = kernel.EstimateComplexity(x);
        }

        // Optionally balance the classes by their label proportion.
        if (useClassLabelProportion)
        {
            WeightRatio = positives / (double)negatives;
        }

        // Create per sample complexity: each class gets its own base cost.
        Cpositive = complexity * positiveWeight;
        Cnegative = complexity * negativeWeight;

        Inputs = x;

        // Per-sample cost vector, chosen by class membership.
        C = new double[y.Length];
        for (int i = 0; i < y.Length; i++)
        {
            C[i] = y[i] ? Cpositive : Cnegative;
        }

        // Convert boolean labels to the -1/+1 encoding the solver expects.
        Outputs = new int[y.Length];
        for (int i = 0; i < y.Length; i++)
        {
            Outputs[i] = y[i] ? 1 : -1;
        }

        // Scale each sample's cost by its importance weight, when given.
        if (weights != null)
        {
            for (int i = 0; i < C.Length; i++)
            {
                C[i] *= weights[i];
            }
        }

        // Run the actual learning algorithm (implemented by the subclass).
        InnerRun();

        SupportVectorLearningHelper.CheckOutput(Model);

        return (Model);
    }
    finally
    {
        if (machine != null)
        {
            // TODO: This block is only necessary to offer compatibility
            // to code written using previous versions of the framework,
            // and should be removed after a few releases.
            machine.SupportVectors = Model.SupportVectors;
            machine.Weights = Model.Weights;
            machine.Threshold = Model.Threshold;
            machine.Kernel = Model.Kernel;
            machine.IsProbabilistic = Model.IsProbabilistic;
        }
    }
}
/// <summary>
///   Learns a model that can map the given inputs to the desired outputs.
/// </summary>
/// <param name="x">The model inputs.</param>
/// <param name="weights">The weight of importance for each input sample.</param>
/// <returns>
///   A model that has learned how to produce suitable outputs
///   given the input data <paramref name="x" />.
/// </returns>
public TModel Learn(TInput[] x, double[] weights = null)
{
    // NOTE(review): the 'weights' parameter is never read in this method —
    // confirm whether per-sample weighting is intended to be supported here.
    bool initialized = false;

    SupportVectorLearningHelper.CheckArgs(x);

    // Create a default kernel from the data when none was supplied.
    if (kernel == null)
    {
        kernel = SupportVectorLearningHelper.CreateKernel<TKernel, TInput>(x);
        initialized = true;
    }

    // A freshly-created kernel is already fitted to the data; only
    // re-estimate parameters of a user-provided kernel when asked to.
    if (!initialized && useKernelEstimation)
    {
        kernel = SupportVectorLearningHelper.EstimateKernel(kernel, x);
    }

    // Lazily create the model on first use, sized to the input dimensionality.
    if (Model == null)
    {
        Model = Create(SupportVectorLearningHelper.GetNumberOfInputs(kernel, x), kernel);
    }
    Model.Kernel = kernel;

    try
    {
        this.inputs = x;

        // Linear term (all zeros) and labels (all ones) of the QP problem.
        double[] zeros = new double[inputs.Length];
        int[] ones = Vector.Ones<int>(inputs.Length);
        this.alpha = Vector.Ones<double>(inputs.Length);

        int l = inputs.Length;
        int n = (int)(nu * l);	// # of alpha's at upper bound

        // Feasible starting point: the first n multipliers sit at the upper
        // bound, one fractional multiplier absorbs the remainder nu*l - n,
        // and the rest start at zero.
        for (int i = 0; i < n; i++)
        {
            alpha[i] = 1;
        }
        if (n < inputs.Length)
        {
            alpha[n] = nu * l - n;
        }
        for (int i = n + 1; i < l; i++)
        {
            alpha[i] = 0;
        }

        // Kernel-matrix row provider: fills 'row' with K(x[i], x[indices[j]])
        // for the requested column indices.
        Func<int, int[], int, double[], double[]> Q = (int i, int[] indices, int length, double[] row) =>
        {
            for (int j = 0; j < length; j++)
            {
                row[j] = Kernel.Function(x[i], x[indices[j]]);
            }
            return (row);
        };

        // Solve the quadratic program using the Fan-Chen-Lin solver,
        // starting from (and writing the solution back into) 'alpha'.
        var s = new FanChenLinQuadraticOptimization(alpha.Length, Q, zeros, ones)
        {
            Tolerance = eps,
            Shrinking = this.shrinking,
            Solution = alpha,
            Token = Token
        };

        bool success = s.Minimize();

        // Count the support vectors (samples with non-zero multipliers).
        int sv = 0;
        for (int i = 0; i < alpha.Length; i++)
        {
            if (alpha[i] > 0)
            {
                sv++;
            }
        }

        // Copy the support vectors and their weights into the model.
        Model.SupportVectors = new TInput[sv];
        Model.Weights = new double[sv];
        for (int i = 0, j = 0; i < alpha.Length; i++)
        {
            if (alpha[i] > 0)
            {
                Model.SupportVectors[j] = inputs[i];
                Model.Weights[j] = alpha[i];
                j++;
            }
        }
        Model.Threshold = s.Rho;

        if (success == false)
        {
            throw new ConvergenceException("Convergence could not be attained. " +
                "Please reduce the cost of misclassification errors by reducing " +
                "the complexity parameter C or try a different kernel function.");
        }
    }
    finally
    {
        if (machine != null)
        {
            // TODO: This block is only necessary to offer compatibility
            // to code written using previous versions of the framework,
            // and should be removed after a few releases.
            machine.SupportVectors = Model.SupportVectors;
            machine.Weights = Model.Weights;
            machine.Threshold = Model.Threshold;
            machine.Kernel = Model.Kernel;
            machine.IsProbabilistic = Model.IsProbabilistic;
        }
    }

    return (Model);
}