/// <summary>
/// Converts <see cref="SupportVectorMachineLearningConfigurationFunction"/>
/// into a lambda function that can be passed to the <see cref="OneVsOneLearning{TInput, TBinary, TModel}.Learner"/>
/// property of a <see cref="MulticlassSupportVectorLearning"/> learning algorithm.
/// </summary>
///
public Func<InnerParameters, InnerLearning> Convert(SupportVectorMachineLearningConfigurationFunction conf)
{
    return delegate(InnerParameters parameters)
    {
        // Convert the multi-class labels into the -1/+1 encoding expected by binary SVM teachers.
        int[] y = Classes.ToMinusOnePlusOne(parameters.Outputs);
        var machine = (KernelSupportVectorMachine)parameters.Model;

        ISupportVectorMachineLearning r = conf(machine, parameters.Inputs, y,
            parameters.Pair.Class1, parameters.Pair.Class2);

        // Preferred path: the configured teacher implements the generic supervised-learning interface.
        var c = r as ISupervisedLearning<SupportVectorMachine<IKernel<double[]>>, double[], bool>;
        if (c != null)
        {
            return c;
        }

        // TODO: The following checks exist only to provide support to previous way of using
        // the library and should be removed after a few releases.
        var svc = r as ISupportVectorMachineLearning;
        if (svc != null)
        {
            svc.Run(); // legacy API: train immediately; the machine is updated in-place.
            return null;
        }

        // Was `throw new Exception()` — throw a specific exception with a diagnostic
        // message instead of the bare base type.
        throw new InvalidOperationException(
            "The learning algorithm returned by the configuration function does not implement a supported teaching interface.");
    };
}
/// <summary>
/// Discards all trained state so the classifier can be rebuilt from scratch.
/// </summary>
public void Reset()
{
    // m_bow = null;

    // Release the trained machine and its teacher; they are recreated on the next training run.
    m_machine = null;
    m_teacher = null;

    // Forget cached training features and start with an empty class-id -> name lookup.
    m_trainImageFeatureVectors = null;
    m_classIdClassNameMap = new Dictionary<int, string>();
}
/// <summary>
/// Learns a Support Vector Machines (SVM).
/// </summary>
/// <param name="trainer">The learning algorithm.</param>
/// <param name="x">The input vectors <paramref name="x"/>.</param>
/// <param name="y">The expected binary output <paramref name="y"/>.</param>
/// <param name="weights">The <c>weight</c> of importance for each input vector (if supported by the learning algorithm).</param>
/// <param name="cancellationToken">The cancellationToken token used to notify the machine that the operation should be canceled.</param>
/// <exception cref="ArgumentNullException">
/// <para><paramref name="trainer"/> is <b>null</b>.</para>
/// <para>-or-</para>
/// <para><paramref name="x"/> is <b>null</b>.</para>
/// <para>-or-</para>
/// <para><paramref name="y"/> is <b>null</b>.</para>
/// </exception>
/// <returns>
/// The <see cref="SupportVectorMachine"/> learned by this method.
/// A model that has learned how to produce <paramref name="y"/> given <paramref name="x"/>.
/// </returns>
public static SupportVectorMachine Learn(
    ISupportVectorMachineLearning trainer,
    IList<float[]> x,
    IList<bool> y,
    IList<float> weights,
    CancellationToken cancellationToken)
{
    if (trainer == null)
    {
        throw new ArgumentNullException(nameof(trainer));
    }

    // Validate the data arguments here so callers get a descriptive exception
    // instead of a NullReferenceException from inside the trainer; this matches
    // the validation performed by the multi-class Learn overload.
    if (x == null)
    {
        throw new ArgumentNullException(nameof(x));
    }

    if (y == null)
    {
        throw new ArgumentNullException(nameof(y));
    }

    return trainer.Learn(x, y, weights, cancellationToken);
}
/// <summary>
/// Trains a single binary linear model for the given problem, mirroring
/// LIBLINEAR's <c>train_one</c> dispatch over solver types.
/// </summary>
/// <param name="prob">The problem containing the input vectors and output labels.</param>
/// <param name="param">The solver selection and its parameters.</param>
/// <param name="w">On return, the learned weight vector of the trained machine.</param>
/// <param name="Cp">The cost parameter for the positive class.</param>
/// <param name="Cn">The cost parameter for the negative class.</param>
/// <exception cref="NotSupportedException">
/// The solver specified in <paramref name="param"/> is not handled by this method.
/// </exception>
public static void train_one(Problem prob, Parameters param, out double[] w, double Cp, double Cn)
{
    double[][] inputs = prob.Inputs;

    // LIBLINEAR expects -1/+1 labels; map any non-negative output to +1.
    int[] labels = prob.Outputs.Apply(x => x >= 0 ? 1 : -1);

    double eps = param.Tolerance;

    // Count positive examples so the primal solver tolerance can be scaled
    // by the class balance, as in the reference LIBLINEAR implementation.
    int pos = 0;
    for (int i = 0; i < labels.Length; i++)
    {
        if (labels[i] >= 0)
        {
            pos++;
        }
    }

    int neg = prob.Outputs.Length - pos;
    double primal_solver_tol = eps * Math.Max(Math.Min(pos, neg), 1.0) / prob.Inputs.Length;

    SupportVectorMachine svm = new SupportVectorMachine(prob.Dimensions);
    ISupportVectorMachineLearning teacher = null;

    switch (param.Solver)
    {
        case LibSvmSolverType.L2RegularizedLogisticRegression:
            // l2r_lr_fun
            teacher = new ProbabilisticNewtonMethod(svm, inputs, labels)
            {
                PositiveWeight = Cp,
                NegativeWeight = Cn,
                Tolerance = primal_solver_tol
            };
            break;

        case LibSvmSolverType.L2RegularizedL2LossSvc:
            // fun_obj=new l2r_l2_svc_fun(prob, C);
            teacher = new LinearNewtonMethod(svm, inputs, labels)
            {
                PositiveWeight = Cp,
                NegativeWeight = Cn,
                Tolerance = primal_solver_tol
            };
            break;

        case LibSvmSolverType.L2RegularizedL2LossSvcDual:
            // solve_l2r_l1l2_svc(prob, w, eps, Cp, Cn, L2R_L2LOSS_SVC_DUAL);
            teacher = new LinearDualCoordinateDescent(svm, inputs, labels)
            {
                Loss = Loss.L2,
                PositiveWeight = Cp,
                NegativeWeight = Cn,
            };
            break;

        case LibSvmSolverType.L2RegularizedL1LossSvcDual:
            // solve_l2r_l1l2_svc(prob, w, eps, Cp, Cn, L2R_L1LOSS_SVC_DUAL);
            teacher = new LinearDualCoordinateDescent(svm, inputs, labels)
            {
                Loss = Loss.L1,
                PositiveWeight = Cp,
                NegativeWeight = Cn,
            };
            break;

        case LibSvmSolverType.L1RegularizedLogisticRegression:
            // solve_l1r_lr(&prob_col, w, primal_solver_tol, Cp, Cn);
            teacher = new ProbabilisticCoordinateDescent(svm, inputs, labels)
            {
                PositiveWeight = Cp,
                NegativeWeight = Cn,
                Tolerance = primal_solver_tol
            };
            break;

        case LibSvmSolverType.L2RegularizedLogisticRegressionDual:
            // solve_l2r_lr_dual(prob, w, eps, Cp, Cn);
            teacher = new ProbabilisticDualCoordinateDescent(svm, inputs, labels)
            {
                PositiveWeight = Cp,
                NegativeWeight = Cn,
                Tolerance = primal_solver_tol,
            };
            break;
    }

    // Guard against solver types not handled by the switch above; previously an
    // unhandled value caused a NullReferenceException at teacher.Run().
    if (teacher == null)
    {
        throw new NotSupportedException("The solver type '" + param.Solver + "' is not supported.");
    }

    Trace.WriteLine("Training " + param.Solver);

    // run the learning algorithm
    var sw = Stopwatch.StartNew();
    double error = teacher.Run();
    sw.Stop();

    // save the solution
    w = svm.ToWeights();

    Trace.WriteLine(String.Format("Finished {0}: {1} in {2}", param.Solver, error, sw.Elapsed));
}
/// <summary>
/// Learns a model that can map the given inputs to the given outputs.
/// </summary>
/// <param name="trainer">The learning algorithm.</param>
/// <param name="numberOfClasses">The number of classes.</param>
/// <param name="x">The input vectors <paramref name="x"/>.</param>
/// <param name="y">The expected binary output <paramref name="y"/>.</param>
/// <param name="weights">The <c>weight</c> of importance for each input vector (if supported by the learning algorithm).</param>
/// <param name="cancellationToken">The cancellationToken token used to notify the machine that the operation should be canceled.</param>
/// <exception cref="ArgumentNullException">
/// <para><paramref name="trainer"/> is <b>null</b>.</para>
/// <para>-or-</para>
/// <para><paramref name="x"/> is <b>null</b>.</para>
/// <para>-or-</para>
/// <para><paramref name="y"/> is <b>null</b>.</para>
/// </exception>
/// <exception cref="ArgumentException">
/// <para><paramref name="numberOfClasses"/> is less than 2.</para>
/// <para>-or-</para>
/// <para>The number of elements in <paramref name="y"/> does not match the number of elements in <paramref name="x"/>.</para>
/// <para>-or-</para>
/// <para><paramref name="weights"/> is not <b>null</b> and the number of elements in <paramref name="weights"/> does not match the number of elements in <paramref name="x"/>.</para>
/// </exception>
/// <returns>
/// The <see cref="OneVsAllSupportVectorMachine"/> learned by this method.
/// A model that has learned how to produce <paramref name="y"/> given <paramref name="x"/>.
/// </returns>
public static OneVsAllSupportVectorMachine Learn(
    ISupportVectorMachineLearning trainer,
    int numberOfClasses,
    IList<float[]> x,
    IList<int> y,
    IList<float> weights,
    CancellationToken cancellationToken)
{
    if (trainer == null)
    {
        throw new ArgumentNullException(nameof(trainer));
    }

    if (x == null)
    {
        throw new ArgumentNullException(nameof(x));
    }

    if (y == null)
    {
        throw new ArgumentNullException(nameof(y));
    }

    if (numberOfClasses < 2)
    {
        throw new ArgumentException("The machine must have at least two classes.", nameof(numberOfClasses));
    }

    if (y.Count != x.Count)
    {
        throw new ArgumentException("The number of output labels must match the number of input vectors.", nameof(y));
    }

    // The XML documentation promises this check, but it was never implemented:
    // a mismatched weights vector must be rejected up front.
    if (weights != null && weights.Count != x.Count)
    {
        throw new ArgumentException("The number of weights must match the number of input vectors.", nameof(weights));
    }

    // create the machines
    SupportVectorMachine[] machines = new SupportVectorMachine[numberOfClasses];

    // train each machine: one-vs-all reduces the multi-class problem to
    // numberOfClasses independent binary problems, trained in parallel.
    int sampleCount = x.Count;
    CommonParallel.For(
        0,
        machines.Length,
        (a, b) =>
        {
            for (int i = a; i < b; i++)
            {
                // Binary target: true where the sample belongs to class i.
                bool[] expected = new bool[sampleCount];
                for (int j = 0; j < sampleCount; j++)
                {
                    expected[j] = y[j] == i;
                }

                machines[i] = SupportVectorMachine.Learn(trainer, x, expected, weights, cancellationToken);
            }
        },
        new ParallelOptions() { CancellationToken = cancellationToken, });

    return new OneVsAllSupportVectorMachine(machines);
}
// IKernel kernel;

/// <summary>
/// Builds and trains the SVM model (binary or multi-class) from the provided
/// feature vectors according to the Accord configuration.
/// </summary>
/// <param name="inputs">Training feature vectors, one per sample.</param>
/// <param name="outputs">Class label for each input vector.</param>
/// <param name="ClassNum">Number of distinct classes; values greater than 2 select the multi-class machine.</param>
/// <param name="config">Classifier configuration (kernel choice and SMO parameters).</param>
/// <returns>The training error reported by the learning algorithm.</returns>
public double BuildTheModel(double[][] inputs, int[] outputs, int ClassNum, ConfigurationFieldClassifier config)
{
    // Monitor overall CPU usage so training can back off while the machine is busy.
    cpuCounter.CategoryName = "Processor";
    cpuCounter.CounterName = "% Processor Time";
    cpuCounter.InstanceName = "_Total";

    Reset();
    _usenongoldenclass = config.FeatureExtraction.UseNonGoldenClass;
    // scalers = scalresin;

    // Select the kernel from configuration (fixes the "kernal" typo).
    IKernel kernel = null;
    switch (config.AccordConfiguration.Kernel)
    {
        case KernelTypes.Gaussian:
            kernel = new Gaussian(config.AccordConfiguration.GaussianKernel.Sigma);
            break;
        case KernelTypes.Polynomial:
            kernel = new Polynomial(config.AccordConfiguration.PolynominalKernel.Degree, config.AccordConfiguration.PolynominalKernel.Constant);
            break;
        case KernelTypes.ChiSquare:
            kernel = new ChiSquare();
            break;
        case KernelTypes.HistogramIntersction:
            kernel = new HistogramIntersection();
            break;
        default:
            break;
    }

    if (ClassNum > 2)
    {
        // An unconfigured kernel previously surfaced as an opaque NullReferenceException
        // inside the machine constructor; fail fast with a descriptive message instead.
        if (kernel == null)
        {
            throw new InvalidOperationException("No kernel is configured for the multi-class support vector machine.");
        }

        m_machine = new MulticlassSupportVectorMachine(inputs[0].Length, kernel, ClassNum);

        // Keep a typed reference so the teacher is not re-cast on every access.
        var multiTeacher = new MulticlassSupportVectorLearning((MulticlassSupportVectorMachine)m_machine, inputs, outputs);
        m_teacher = multiTeacher;

        multiTeacher.Algorithm = (svm, classInputs, classOutputs, i, j) =>
        {
            var smo = new SequentialMinimalOptimization(svm, classInputs, classOutputs);
            smo.Complexity = config.AccordConfiguration.Complexity;
            smo.Tolerance = config.AccordConfiguration.Tolerance;
            smo.CacheSize = config.AccordConfiguration.CacheSize;
            smo.Strategy = (Accord.MachineLearning.VectorMachines.Learning.SelectionStrategy)((int)(config.AccordConfiguration.SelectionStrategy));
            // smo.UseComplexityHeuristic = true;
            // smo.PositiveWeight = 1;

            // Back off while overall CPU usage is above 50% (bounded wait: 30000 * 50 ms).
            int k = 0;
            while (cpuCounter.NextValue() > 50)
            {
                Thread.Sleep(50);
                k++;
                if (k > 30000)
                {
                    break;
                }
            }

            // smo.NegativeWeight = 1;
            smo.Run();

            // Calibrate probabilistic outputs on top of the trained binary machine.
            var probabilisticOutputLearning = new ProbabilisticOutputLearning(svm, classInputs, classOutputs);
            return probabilisticOutputLearning;
            // return smo;
        };
    }
    else
    {
        // FIX TO BASE TYPES THAN RUN THAN MAKE OTHER 2 CHANGES from LATEST - line ...
        // and CLUSTER AND RUNTEST.. and check again...
        m_machine = new SupportVectorMachine(inputs[0].Length);

        // Cache the typed teacher once instead of re-casting with `as` on every
        // property assignment; also drops the duplicated Complexity assignment
        // and the stray double semicolons present in the original.
        var smoTeacher = new SequentialMinimalOptimization((SupportVectorMachine)m_machine, inputs, outputs);
        m_teacher = smoTeacher;

        smoTeacher.Complexity = config.AccordConfiguration.Complexity;
        smoTeacher.Tolerance = config.AccordConfiguration.Tolerance;
        smoTeacher.CacheSize = config.AccordConfiguration.CacheSize;
        smoTeacher.Strategy = (Accord.MachineLearning.VectorMachines.Learning.SelectionStrategy)((int)(config.AccordConfiguration.SelectionStrategy));
    }

    // Train the machines. It should take a while.
    double error = m_teacher.Run();
    return error;
}