/// <summary>
/// Predict a target using a linear classification model trained with the <see cref="SgdNonCalibratedTrainer"/> trainer.
/// </summary>
/// <param name="catalog">The binary classification catalog trainer object.</param>
/// <param name="label">The name of the label column.</param>
/// <param name="features">The name of the feature column.</param>
/// <param name="weights">The name for the example weight column.</param>
/// <param name="numberOfIterations">The maximum number of iterations; set to 1 to simulate online learning.</param>
/// <param name="learningRate">The initial learning rate used by SGD.</param>
/// <param name="l2Regularization">The L2 weight for <a href='https://en.wikipedia.org/wiki/Regularization_(mathematics)'>regularization</a>.</param>
/// <param name="lossFunction">The loss function to use.</param>
/// <param name="onFit">A delegate that is called every time the
/// <see cref="Estimator{TTupleInShape, TTupleOutShape, TTransformer}.Fit(DataView{TTupleInShape})"/> method is called on the
/// <see cref="Estimator{TTupleInShape, TTupleOutShape, TTransformer}"/> instance created out of this. This delegate will receive
/// the linear model that was trained. Note that this action cannot change the result in any way; it is only a way for the
/// caller to be informed about what was learnt.</param>
/// <returns>The predicted output.</returns>
public static (Scalar<float> score, Scalar<bool> predictedLabel) StochasticGradientDescentNonCalibratedClassificationTrainer(
    this BinaryClassificationCatalog.BinaryClassificationTrainers catalog,
    Scalar<bool> label,
    Vector<float> features,
    Scalar<float> weights = null,
    int numberOfIterations = SgdNonCalibratedTrainer.Options.Defaults.NumberOfIterations,
    double learningRate = SgdNonCalibratedTrainer.Options.Defaults.LearningRate,
    float l2Regularization = SgdNonCalibratedTrainer.Options.Defaults.L2Regularization,
    IClassificationLoss lossFunction = null,
    Action<LinearBinaryModelParameters> onFit = null)
{
    // The reconciler resolves the statically-typed columns to their runtime names and
    // hands them to the factory below to build the concrete trainer.
    var reconciler = new TrainerEstimatorReconciler.BinaryClassifierNoCalibration(
        (env, labelName, featuresName, weightsName) =>
        {
            var sgd = new SgdNonCalibratedTrainer(env, labelName, featuresName, weightsName,
                numberOfIterations, learningRate, l2Regularization, lossFunction);

            // Guard-style: with no fit callback, the trainer is returned as-is.
            if (onFit == null)
                return sgd;

            // Wrap so the caller is notified with the trained model; cannot alter the result.
            return sgd.WithOnFitDelegate(transformer => onFit(transformer.Model));
        },
        label, features, weights);

    return reconciler.Output;
}
/// <summary>
/// Predict a target using a linear binary classification model trained with the AveragedPerceptron trainer, and a custom loss.
/// </summary>
/// <param name="catalog">The binary classification catalog trainer object.</param>
/// <param name="label">The label, or dependent variable.</param>
/// <param name="features">The features, or independent variables.</param>
/// <param name="lossFunction">The custom loss.</param>
/// <param name="weights">The optional example weights.</param>
/// <param name="learningRate">The learning rate.</param>
/// <param name="decreaseLearningRate">Decrease learning rate as iterations progress.</param>
/// <param name="l2Regularization">L2 regularization weight.</param>
/// <param name="numIterations">Number of training iterations through the data.</param>
/// <param name="onFit">A delegate that is called every time the
/// <see cref="Estimator{TInShape, TOutShape, TTransformer}.Fit(DataView{TInShape})"/> method is called on the
/// <see cref="Estimator{TInShape, TOutShape, TTransformer}"/> instance created out of this. This delegate will receive
/// the linear model that was trained, as well as the calibrator on top of that model. Note that this action cannot change the
/// result in any way; it is only a way for the caller to be informed about what was learnt.</param>
/// <returns>The set of output columns including in order the predicted binary classification score (which will range
/// from negative to positive infinity), and the predicted label.</returns>
/// <seealso cref="AveragedPerceptronTrainer"/>.
/// <example>
/// <format type="text/markdown">
/// <![CDATA[
/// [!code-csharp[AveragedPerceptron](~/../docs/samples/docs/samples/Microsoft.ML.Samples/Static/AveragedPerceptronBinaryClassification.cs)]
/// ]]></format>
/// </example>
public static (Scalar<float> score, Scalar<bool> predictedLabel) AveragedPerceptron(
    this BinaryClassificationCatalog.BinaryClassificationTrainers catalog,
    Scalar<bool> label,
    Vector<float> features,
    Scalar<float> weights = null,
    IClassificationLoss lossFunction = null,
    float learningRate = AveragedLinearOptions.AveragedDefault.LearningRate,
    bool decreaseLearningRate = AveragedLinearOptions.AveragedDefault.DecreaseLearningRate,
    float l2Regularization = AveragedLinearOptions.AveragedDefault.L2Regularization,
    int numIterations = AveragedLinearOptions.AveragedDefault.NumberOfIterations,
    Action<LinearBinaryModelParameters> onFit = null)
{
    // Validate user-supplied pipeline inputs before constructing the reconciler.
    OnlineLinearStaticUtils.CheckUserParams(label, features, weights, learningRate, l2Regularization, numIterations, onFit);

    // FIX: removed the unused local `bool hasProbs = lossFunction is LogLoss;` — unlike the
    // Arguments/Options-based overloads, this reconciler is never handed that flag.
    var rec = new TrainerEstimatorReconciler.BinaryClassifierNoCalibration(
        (env, labelName, featuresName, weightsName) =>
        {
            var trainer = new AveragedPerceptronTrainer(env, labelName, featuresName, lossFunction,
                learningRate, decreaseLearningRate, l2Regularization, numIterations);

            if (onFit != null)
                return trainer.WithOnFitDelegate(trans => onFit(trans.Model));

            return trainer;
        }, label, features, weights);

    return rec.Output;
}
/// <summary>
/// Predict a target using a linear binary classification model trained with the AveragedPerceptron trainer, and a custom loss.
/// </summary>
/// <param name="ctx">The binary classification context trainer object.</param>
/// <param name="label">The label, or dependent variable.</param>
/// <param name="features">The features, or independent variables.</param>
/// <param name="lossFunction">The custom loss.</param>
/// <param name="weights">The optional example weights.</param>
/// <param name="learningRate">The learning rate.</param>
/// <param name="decreaseLearningRate">Decrease learning rate as iterations progress.</param>
/// <param name="l2RegularizerWeight">L2 regularization weight.</param>
/// <param name="numIterations">Number of training iterations through the data.</param>
/// <param name="onFit">A delegate that is called every time the
/// <see cref="Estimator{TTupleInShape, TTupleOutShape, TTransformer}.Fit(DataView{TTupleInShape})"/> method is called on the
/// <see cref="Estimator{TTupleInShape, TTupleOutShape, TTransformer}"/> instance created out of this. This delegate will receive
/// the linear model that was trained, as well as the calibrator on top of that model. Note that this action cannot change the
/// result in any way; it is only a way for the caller to be informed about what was learnt.</param>
/// <returns>The set of output columns including in order the predicted binary classification score (which will range
/// from negative to positive infinity), and the predicted label.</returns>
/// <seealso cref="AveragedPerceptronTrainer"/>.
public static (Scalar<float> score, Scalar<bool> predictedLabel) AveragedPerceptron(
    this BinaryClassificationContext.BinaryClassificationTrainers ctx,
    IClassificationLoss lossFunction,
    Scalar<bool> label,
    Vector<float> features,
    Scalar<float> weights = null,
    float learningRate = AveragedLinearArguments.AveragedDefaultArgs.LearningRate,
    bool decreaseLearningRate = AveragedLinearArguments.AveragedDefaultArgs.DecreaseLearningRate,
    float l2RegularizerWeight = AveragedLinearArguments.AveragedDefaultArgs.L2RegularizerWeight,
    int numIterations = AveragedLinearArguments.AveragedDefaultArgs.NumIterations,
    Action<LinearBinaryPredictor> onFit = null)
{
    OnlineLinearStaticUtils.CheckUserParams(label, features, weights, learningRate, l2RegularizerWeight, numIterations, onFit);

    // BUG FIX: only the logistic (log) loss produces scores interpretable as probabilities.
    // The previous check was `lossFunction is HingeLoss`, which is backwards — hinge loss never
    // yields probabilities. The sibling overloads in this file test `is LogLoss`.
    bool hasProbs = lossFunction is LogLoss;

    var args = new AveragedPerceptronTrainer.Arguments()
    {
        LearningRate = learningRate,
        DecreaseLearningRate = decreaseLearningRate,
        L2RegularizerWeight = l2RegularizerWeight,
        NumIterations = numIterations
    };

    // A custom loss is passed through a trivial factory wrapper; a null loss leaves the
    // trainer's own default in place.
    if (lossFunction != null)
        args.LossFunction = new TrivialClassificationLossFactory(lossFunction);

    var rec = new TrainerEstimatorReconciler.BinaryClassifierNoCalibration(
        (env, labelName, featuresName, weightsName) =>
        {
            // Column names are only known at reconciliation time, so patch them in here.
            args.FeatureColumn = featuresName;
            args.LabelColumn = labelName;
            args.InitialWeights = weightsName;

            var trainer = new AveragedPerceptronTrainer(env, args);

            if (onFit != null)
                return trainer.WithOnFitDelegate(trans => onFit(trans.Model));

            return trainer;
        }, label, features, weights, hasProbs);

    return rec.Output;
}
/// <summary>
/// Predict a target using a linear binary classification model trained with <see cref="AveragedPerceptronTrainer"/>.
/// </summary>
/// <param name="catalog">The binary classification catalog trainer object.</param>
/// <param name="labelColumn">The name of the label column, or dependent variable.</param>
/// <param name="featureColumn">The features, or independent variables.</param>
/// <param name="lossFunction">A custom <a href="tmpurl_loss">loss</a>. If <see langword="null"/>, hinge loss will be used resulting in max-margin averaged perceptron.</param>
/// <param name="learningRate"><a href="tmpurl_lr">Learning rate</a>.</param>
/// <param name="decreaseLearningRate">
/// <see langword="true" /> to decrease the <paramref name="learningRate"/> as iterations progress; otherwise, <see langword="false" />.
/// Default is <see langword="false" />.
/// </param>
/// <param name="l2RegularizerWeight">L2 weight for <a href='tmpurl_regularization'>regularization</a>.</param>
/// <param name="numIterations">Number of passes through the training dataset.</param>
/// <example>
/// <format type="text/markdown">
/// <![CDATA[
/// [!code-csharp[AveragedPerceptron](~/../docs/samples/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/AveragedPerceptron.cs)]
/// ]]>
/// </format>
/// </example>
public static AveragedPerceptronTrainer AveragedPerceptron(
    this BinaryClassificationCatalog.BinaryClassificationTrainers catalog,
    string labelColumn = DefaultColumnNames.Label,
    string featureColumn = DefaultColumnNames.Features,
    IClassificationLoss lossFunction = null,
    float learningRate = AveragedLinearArguments.AveragedDefaultArgs.LearningRate,
    bool decreaseLearningRate = AveragedLinearArguments.AveragedDefaultArgs.DecreaseLearningRate,
    float l2RegularizerWeight = AveragedLinearArguments.AveragedDefaultArgs.L2RegularizerWeight,
    int numIterations = AveragedLinearArguments.AveragedDefaultArgs.NumIterations)
{
    Contracts.CheckValue(catalog, nameof(catalog));
    var env = CatalogUtils.GetEnvironment(catalog);

    // BUG FIX: the XML doc above promises hinge loss (the max-margin averaged perceptron)
    // when lossFunction is null, but the code defaulted to LogLoss. Default to HingeLoss,
    // matching both the documentation and the trainer's own constructor defaults.
    return new AveragedPerceptronTrainer(env, labelColumn, featureColumn, lossFunction ?? new HingeLoss(),
        learningRate, decreaseLearningRate, l2RegularizerWeight, numIterations);
}
/// <summary>
/// Predict a target using a linear binary classification model trained with the AveragedPerceptron trainer, and a custom loss.
/// </summary>
/// <param name="ctx">The binary classification context trainer object.</param>
/// <param name="label">The label, or dependent variable.</param>
/// <param name="features">The features, or independent variables.</param>
/// <param name="lossFunction">The custom loss. If <see langword="null"/>, hinge loss is used, giving the classic max-margin averaged perceptron.</param>
/// <param name="weights">The optional example weights.</param>
/// <param name="learningRate">The learning rate.</param>
/// <param name="decreaseLearningRate">Decrease learning rate as iterations progress.</param>
/// <param name="l2RegularizerWeight">L2 regularization weight.</param>
/// <param name="numIterations">Number of training iterations through the data.</param>
/// <param name="advancedSettings">A delegate to supply more advanced arguments to the algorithm.</param>
public static AveragedPerceptronTrainer AveragedPerceptron(
    this BinaryClassificationContext.BinaryClassificationTrainers ctx,
    string label = DefaultColumnNames.Label,
    string features = DefaultColumnNames.Features,
    string weights = null,
    IClassificationLoss lossFunction = null,
    float learningRate = AveragedLinearArguments.AveragedDefaultArgs.LearningRate,
    bool decreaseLearningRate = AveragedLinearArguments.AveragedDefaultArgs.DecreaseLearningRate,
    float l2RegularizerWeight = AveragedLinearArguments.AveragedDefaultArgs.L2RegularizerWeight,
    int numIterations = AveragedLinearArguments.AveragedDefaultArgs.NumIterations,
    Action<AveragedPerceptronTrainer.Arguments> advancedSettings = null)
{
    Contracts.CheckValue(ctx, nameof(ctx));
    var env = CatalogUtils.GetEnvironment(ctx);

    // CONSISTENCY FIX: this overload defaulted a null loss to LogLoss while the trainer's own
    // constructors in this file default to HingeLoss (the canonical averaged-perceptron loss).
    // Align on HingeLoss so all entry points agree.
    return new AveragedPerceptronTrainer(env, label, features, weights, lossFunction ?? new HingeLoss(),
        learningRate, decreaseLearningRate, l2RegularizerWeight, numIterations, advancedSettings);
}
/// <summary>
/// Trains a linear binary classifier using the averaged perceptron.
/// <a href='https://en.wikipedia.org/wiki/Perceptron'>Wikipedia entry for Perceptron</a>
/// </summary>
/// <param name="env">The local instance of the <see cref="IHostEnvironment"/>.</param>
/// <param name="lossFunction">The classification loss function. When <see langword="null"/>, hinge loss is used,
/// giving the classic max-margin averaged perceptron.</param>
/// <param name="labelColumnName">The name of the label column.</param>
/// <param name="featureColumnName">The name of the feature column.</param>
/// <param name="learningRate">The learning rate.</param>
/// <param name="decreaseLearningRate">Whether to decrease learning rate as iterations progress.</param>
/// <param name="l2Regularization">Weight of L2 regularization term.</param>
/// <param name="numberOfIterations">The number of training iterations.</param>
internal AveragedPerceptronTrainer(IHostEnvironment env,
    string labelColumnName = DefaultColumnNames.Label,
    string featureColumnName = DefaultColumnNames.Features,
    IClassificationLoss lossFunction = null,
    float learningRate = Options.AveragedDefault.LearningRate,
    bool decreaseLearningRate = Options.AveragedDefault.DecreaseLearningRate,
    float l2Regularization = Options.AveragedDefault.L2Regularization,
    int numberOfIterations = Options.AveragedDefault.NumberOfIterations)
    // Pure convenience overload: packs the loose parameters into an Options bag and
    // delegates to the Options-based constructor. No extra logic lives here.
    : this(env, new Options
    {
        LabelColumnName = labelColumnName,
        FeatureColumnName = featureColumnName,
        LearningRate = learningRate,
        DecreaseLearningRate = decreaseLearningRate,
        L2Regularization = l2Regularization,
        NumberOfIterations = numberOfIterations,
        LossFunction = lossFunction ?? new HingeLoss()
    })
{
}
/// <summary>
/// Trains a linear binary classifier using the averaged perceptron.
/// <a href='https://en.wikipedia.org/wiki/Perceptron'>Wikipedia entry for Perceptron</a>
/// </summary>
/// <param name="env">The local instance of the <see cref="IHostEnvironment"/>.</param>
/// <param name="lossFunction">The classification loss function. When <see langword="null"/>, hinge loss is used,
/// giving the classic max-margin averaged perceptron.</param>
/// <param name="labelColumn">The name of the label column.</param>
/// <param name="featureColumn">The name of the feature column.</param>
/// <param name="learningRate">The learning rate.</param>
/// <param name="decreaseLearningRate">Whether to decrease learning rate as iterations progress.</param>
/// <param name="l2RegularizerWeight">L2 regularization weight.</param>
/// <param name="numIterations">The number of training iterations.</param>
internal AveragedPerceptronTrainer(IHostEnvironment env,
    string labelColumn = DefaultColumnNames.Label,
    string featureColumn = DefaultColumnNames.Features,
    IClassificationLoss lossFunction = null,
    float learningRate = Options.AveragedDefaultArgs.LearningRate,
    bool decreaseLearningRate = Options.AveragedDefaultArgs.DecreaseLearningRate,
    float l2RegularizerWeight = Options.AveragedDefaultArgs.L2RegularizerWeight,
    int numIterations = Options.AveragedDefaultArgs.NumIterations)
    // Convenience overload: bundles the loose parameters into an Options bag and delegates.
    // The loss is wrapped in TrivialFactory because Options expects a loss *factory*.
    : this(env, new Options
    {
        LabelColumn = labelColumn,
        FeatureColumn = featureColumn,
        LearningRate = learningRate,
        DecreaseLearningRate = decreaseLearningRate,
        L2RegularizerWeight = l2RegularizerWeight,
        NumIterations = numIterations,
        LossFunction = new TrivialFactory(lossFunction ?? new HingeLoss())
    })
{
}
/// <summary>
/// Predict a target using a linear binary classification model trained with the AveragedPerceptron trainer, and a custom loss.
/// </summary>
/// <param name="catalog">The binary classification catalog trainer object.</param>
/// <param name="label">The label, or dependent variable.</param>
/// <param name="features">The features, or independent variables.</param>
/// <param name="lossFunction">The custom loss.</param>
/// <param name="weights">The optional example weights.</param>
/// <param name="options">Advanced arguments to the algorithm. Must not be <see langword="null"/>.</param>
/// <param name="onFit">A delegate that is called every time the
/// <see cref="Estimator{TInShape, TOutShape, TTransformer}.Fit(DataView{TInShape})"/> method is called on the
/// <see cref="Estimator{TInShape, TOutShape, TTransformer}"/> instance created out of this. This delegate will receive
/// the linear model that was trained, as well as the calibrator on top of that model. Note that this action cannot change the
/// result in any way; it is only a way for the caller to be informed about what was learnt.</param>
/// <returns>The set of output columns including in order the predicted binary classification score (which will range
/// from negative to positive infinity), and the predicted label.</returns>
/// <seealso cref="AveragedPerceptronTrainer"/>.
/// <example>
/// <format type="text/markdown">
/// <![CDATA[
/// [!code-csharp[AveragedPerceptron](~/../docs/samples/docs/samples/Microsoft.ML.Samples/Static/AveragedPerceptronBinaryClassification.cs)]
/// ]]></format>
/// </example>
public static (Scalar<float> score, Scalar<bool> predictedLabel) AveragedPerceptron(
    this BinaryClassificationCatalog.BinaryClassificationTrainers catalog,
    Scalar<bool> label,
    Vector<float> features,
    Scalar<float> weights,
    IClassificationLoss lossFunction,
    AveragedPerceptronTrainer.Options options,
    Action<LinearBinaryModelParameters> onFit = null)
{
    Contracts.CheckValue(label, nameof(label));
    Contracts.CheckValue(features, nameof(features));
    Contracts.CheckValueOrNull(weights);
    // BUG FIX: 'options' is dereferenced unconditionally in the reconciler delegate below
    // (options.LabelColumn = ...), so a null value must be rejected up front. The previous
    // CheckValueOrNull let null through, deferring the failure to an NRE at fit time.
    Contracts.CheckValue(options, nameof(options));
    Contracts.CheckValueOrNull(onFit);

    // Only the logistic (log) loss yields scores interpretable as probabilities.
    bool hasProbs = lossFunction is LogLoss;

    var rec = new TrainerEstimatorReconciler.BinaryClassifierNoCalibration(
        (env, labelName, featuresName, weightsName) =>
        {
            // NOTE(review): the caller-supplied options instance is mutated here; presumably
            // intentional (mirrors the Arguments-based overload) — confirm callers don't reuse it.
            options.LabelColumn = labelName;
            options.FeatureColumn = featuresName;
            options.InitialWeights = weightsName;

            var trainer = new AveragedPerceptronTrainer(env, options);

            if (onFit != null)
                return trainer.WithOnFitDelegate(trans => onFit(trans.Model));

            return trainer;
        }, label, features, weights, hasProbs);

    return rec.Output;
}
/// <summary>
/// Trains an averaged-perceptron binary classifier over <paramref name="trainDataset"/> and returns a
/// prediction engine for scoring individual <typeparamref name="TIn"/> instances.
/// </summary>
/// <param name="trainDataset">The in-memory training examples.</param>
/// <param name="lossFunction">Optional custom classification loss.</param>
/// <param name="learningRate">The learning rate.</param>
/// <param name="decreaseLearningRate">Whether to decrease the learning rate as iterations progress.</param>
/// <param name="l2Regularization">Weight of the L2 regularization term.</param>
/// <param name="numberOfIterations">Number of passes over the training data.</param>
/// <param name="additionModelAction">Optional callback invoked with the fitted model (e.g. for saving).</param>
/// <returns>A prediction engine over the fitted pipeline.</returns>
public static PredictionEngine<TIn, TOut> AveragedPerceptron<TIn, TOut>(
    IEnumerable<TIn> trainDataset,
    IClassificationLoss lossFunction = null,
    float learningRate = 1f,
    bool decreaseLearningRate = false,
    float l2Regularization = 0f,
    int numberOfIterations = 1,
    Action<ITransformer> additionModelAction = null)
    where TIn : class, new()
    where TOut : class, new()
{
    var ml = new MLContext();

    // Derive the label column and the encodable feature columns from TIn's public properties.
    var inputProperties = typeof(TIn).GetProperties();
    var labelName = Preprocessing.LabelColumn(inputProperties).Name;
    var encoder = ml.OneHotEncoding(Preprocessing.ExcludeColumns(inputProperties));

    var trainingData = ml.Data.LoadFromEnumerable(trainDataset);

    var trainerEstimator = ml.BinaryClassification.Trainers.AveragedPerceptron(
        labelName, featureColumnName: "Features", lossFunction,
        learningRate, decreaseLearningRate, l2Regularization, numberOfIterations);

    // Concatenate raw features, apply one-hot encoding, cache, then train.
    var pipeline = ml.Transforms
        .Concatenate("Features", encoder.CombinedFeatures.ToArray())
        .Append(encoder.OneHotEncodingEstimator)
        .AppendCacheCheckpoint(ml)
        .Append(trainerEstimator);

    var fittedModel = pipeline.Fit(trainingData);
    var engine = ml.Model.CreatePredictionEngine<TIn, TOut>(fittedModel);

    // Surface the fitted model to the caller; cannot change the engine already created.
    additionModelAction?.Invoke(fittedModel);

    return engine;
}
/// <summary>
/// Trains a linear binary classifier using the averaged perceptron.
/// <a href='https://en.wikipedia.org/wiki/Perceptron'>Wikipedia entry for Perceptron</a>
/// </summary>
/// <param name="env">The local instance of the <see cref="IHostEnvironment"/>.</param>
/// <param name="lossFunction">The classification loss function. When <see langword="null"/>, hinge loss is used,
/// giving the classic max-margin averaged perceptron.</param>
/// <param name="labelColumn">The name of the label column.</param>
/// <param name="featureColumn">The name of the feature column.</param>
/// <param name="weights">The optional name of the weights column.</param>
/// <param name="learningRate">The learning rate.</param>
/// <param name="decreaseLearningRate">Whether to decrease learning rate as iterations progress.</param>
/// <param name="l2RegularizerWeight">L2 regularization weight.</param>
/// <param name="numIterations">The number of training iterations.</param>
/// <param name="advancedSettings">A delegate to supply more advanced arguments to the algorithm;
/// applied to the Arguments bag (via InvokeAdvanced) after the loose parameters are packed in,
/// so it can override any of them.</param>
public AveragedPerceptronTrainer(IHostEnvironment env,
    string labelColumn = DefaultColumnNames.Label,
    string featureColumn = DefaultColumnNames.Features,
    string weights = null,
    IClassificationLoss lossFunction = null,
    float learningRate = Arguments.AveragedDefaultArgs.LearningRate,
    bool decreaseLearningRate = Arguments.AveragedDefaultArgs.DecreaseLearningRate,
    float l2RegularizerWeight = Arguments.AveragedDefaultArgs.L2RegularizerWeight,
    int numIterations = Arguments.AveragedDefaultArgs.NumIterations,
    Action<Arguments> advancedSettings = null)
    // Convenience overload: packs parameters into an Arguments bag, lets the caller's
    // advancedSettings delegate mutate it, then delegates to the Arguments-based constructor.
    : this(env, InvokeAdvanced(advancedSettings, new Arguments
    {
        LabelColumn = labelColumn,
        FeatureColumn = featureColumn,
        InitialWeights = weights,
        LearningRate = learningRate,
        DecreaseLearningRate = decreaseLearningRate,
        L2RegularizerWeight = l2RegularizerWeight,
        NumIterations = numIterations,
        LossFunction = new TrivialFactory(lossFunction ?? new HingeLoss())
    }))
{
}
/// <summary>
/// Wraps an existing <see cref="IClassificationLoss"/> instance so it can be supplied
/// wherever a loss factory is required.
/// </summary>
/// <param name="loss">The loss instance this factory will hand back.</param>
public TrivialClassificationLossFactory(IClassificationLoss loss) => _loss = loss;
/// <summary>
/// Predict a target using a linear classification model trained with the <see cref="SgdNonCalibratedBinaryTrainer"/> trainer.
/// </summary>
/// <param name="catalog">The binary classification catalog trainer object.</param>
/// <param name="labelColumnName">The name of the label column.</param>
/// <param name="featureColumnName">The name of the feature column.</param>
/// <param name="weightColumnName">The name for the example weight column.</param>
/// <param name="loss">The loss function minimized in the training process. Using, for example, <see cref="HingeLoss"/> leads to a support vector machine trainer.</param>
/// <param name="maxIterations">The maximum number of iterations; set to 1 to simulate online learning.</param>
/// <param name="initLearningRate">The initial learning rate used by SGD.</param>
/// <param name="l2Weight">The L2 regularization constant.</param>
public static SgdNonCalibratedBinaryTrainer StochasticGradientDescentNonCalibrated(
    this BinaryClassificationCatalog.BinaryClassificationTrainers catalog,
    string labelColumnName = DefaultColumnNames.Label,
    string featureColumnName = DefaultColumnNames.Features,
    string weightColumnName = null,
    IClassificationLoss loss = null,
    int maxIterations = SgdNonCalibratedBinaryTrainer.Options.Defaults.MaxIterations,
    double initLearningRate = SgdNonCalibratedBinaryTrainer.Options.Defaults.InitLearningRate,
    float l2Weight = SgdNonCalibratedBinaryTrainer.Options.Defaults.L2Weight)
{
    Contracts.CheckValue(catalog, nameof(catalog));

    // Resolve the host environment from the catalog and hand everything to the trainer.
    var host = CatalogUtils.GetEnvironment(catalog);
    return new SgdNonCalibratedBinaryTrainer(
        host, labelColumnName, featureColumnName, weightColumnName,
        maxIterations, initLearningRate, l2Weight, loss);
}