/// <summary>
/// Trains a linear binary classifier using the averaged perceptron.
/// <a href='https://en.wikipedia.org/wiki/Perceptron'>Wikipedia entry for Perceptron</a>
/// </summary>
/// <param name="env">The local instance of the <see cref="IHostEnvironment"/>.</param>
/// <param name="label">The name of the label column.</param>
/// <param name="features">The name of the feature column.</param>
/// <param name="weights">The optional name of the weights column.</param>
/// <param name="lossFunction">The classification loss function.</param>
/// <param name="learningRate">The learning rate.</param>
/// <param name="decreaseLearningRate">Whether to decrease the learning rate as iterations progress.</param>
/// <param name="l2RegularizerWeight">The L2 regularization weight.</param>
/// <param name="numIterations">The number of training iterations.</param>
/// <param name="advancedSettings">A delegate to supply more advanced arguments to the algorithm.</param>
public AveragedPerceptronTrainer(IHostEnvironment env,
    string label,
    string features,
    string weights = null,
    ISupportClassificationLossFactory lossFunction = null,
    float learningRate = Arguments.AveragedDefaultArgs.LearningRate,
    bool decreaseLearningRate = Arguments.AveragedDefaultArgs.DecreaseLearningRate,
    float l2RegularizerWeight = Arguments.AveragedDefaultArgs.L2RegularizerWeight,
    int numIterations = Arguments.AveragedDefaultArgs.NumIterations,
    Action<Arguments> advancedSettings = null)
    : this(env, new Arguments
    {
        LabelColumn = label,
        FeatureColumn = features,
        InitialWeights = weights,
        LearningRate = learningRate,
        DecreaseLearningRate = decreaseLearningRate,
        L2RegularizerWeight = l2RegularizerWeight,
        NumIterations = numIterations
    })
{
    // Fall back to hinge loss when the caller does not supply a loss factory.
    lossFunction = lossFunction ?? new HingeLoss.Arguments();
    LossFunction = lossFunction.CreateComponent(env);

    // Apply any advanced overrides on top of the arguments assembled above.
    advancedSettings?.Invoke(_args);
}
/// <summary>
/// Predict a target using a linear binary classification model trained with the
/// <see cref="StochasticGradientDescentClassificationTrainer"/> trainer.
/// </summary>
/// <param name="ctx">The binary classification context trainer object.</param>
/// <param name="labelColumn">The name of the label column.</param>
/// <param name="featureColumn">The name of the feature column.</param>
/// <param name="weights">The name for the example weight column.</param>
/// <param name="maxIterations">The maximum number of iterations; set to 1 to simulate online learning.</param>
/// <param name="initLearningRate">The initial learning rate used by SGD.</param>
/// <param name="l2Weight">The L2 regularization constant.</param>
/// <param name="loss">The loss function to use.</param>
/// <param name="advancedSettings">A delegate to apply all the advanced arguments to the algorithm.</param>
public static StochasticGradientDescentClassificationTrainer StochasticGradientDescent(
    this BinaryClassificationContext.BinaryClassificationTrainers ctx,
    string labelColumn = DefaultColumnNames.Label,
    string featureColumn = DefaultColumnNames.Features,
    string weights = null,
    int maxIterations = SgdArguments.Defaults.MaxIterations,
    double initLearningRate = SgdArguments.Defaults.InitLearningRate,
    float l2Weight = SgdArguments.Defaults.L2Weight,
    ISupportClassificationLossFactory loss = null,
    Action<SgdArguments> advancedSettings = null)
{
    Contracts.CheckValue(ctx, nameof(ctx));

    // Resolve the host environment from the catalog and hand everything to the trainer.
    var host = CatalogUtils.GetEnvironment(ctx);
    return new StochasticGradientDescentClassificationTrainer(
        host, labelColumn, featureColumn, weights,
        maxIterations, initLearningRate, l2Weight, loss, advancedSettings);
}
/// <summary>
/// Predict a target using a linear binary classification model trained with the
/// <see cref="Microsoft.ML.Trainers.StochasticGradientDescentClassificationTrainer"/> trainer.
/// </summary>
/// <param name="ctx">The binary classification context trainer object.</param>
/// <param name="label">The name of the label column.</param>
/// <param name="features">The name of the feature column.</param>
/// <param name="weights">The name for the example weight column.</param>
/// <param name="maxIterations">The maximum number of iterations; set to 1 to simulate online learning.</param>
/// <param name="initLearningRate">The initial learning rate used by SGD.</param>
/// <param name="l2Weight">The L2 regularization constant.</param>
/// <param name="loss">The loss function to use.</param>
/// <param name="advancedSettings">A delegate to apply all the advanced arguments to the algorithm.</param>
/// <param name="onFit">A delegate that is called every time the
/// <see cref="Estimator{TTupleInShape, TTupleOutShape, TTransformer}.Fit(DataView{TTupleInShape})"/> method is called on the
/// <see cref="Estimator{TTupleInShape, TTupleOutShape, TTransformer}"/> instance created out of this. This delegate will receive
/// the linear model that was trained. Note that this action cannot change the result in any way; it is only a way for the caller to
/// be informed about what was learnt.</param>
/// <returns>The predicted output.</returns>
public static (Scalar<float> score, Scalar<float> probability, Scalar<bool> predictedLabel) StochasticGradientDescentClassificationTrainer(
    this BinaryClassificationContext.BinaryClassificationTrainers ctx,
    Scalar<bool> label,
    Vector<float> features,
    Scalar<float> weights = null,
    int maxIterations = Arguments.Defaults.MaxIterations,
    double initLearningRate = Arguments.Defaults.InitLearningRate,
    float l2Weight = Arguments.Defaults.L2Weight,
    ISupportClassificationLossFactory loss = null,
    Action<Arguments> advancedSettings = null,
    Action<IPredictorWithFeatureWeights<float>> onFit = null)
{
    var reconciler = new TrainerEstimatorReconciler.BinaryClassifier(
        (env, labelName, featuresName, weightsName) =>
        {
            var trainer = new StochasticGradientDescentClassificationTrainer(
                env, labelName, featuresName, weightsName,
                maxIterations, initLearningRate, l2Weight, loss, advancedSettings);

            // No callback requested: hand back the trainer as-is.
            if (onFit == null)
                return trainer;

            // Surface the trained linear model to the caller; this cannot alter the result.
            return trainer.WithOnFitDelegate(trans => onFit(trans.Model));
        },
        label, features, weights);

    return reconciler.Output;
}