/// <summary>
/// Trains a linear binary classifier using the averaged perceptron.
/// <a href='https://en.wikipedia.org/wiki/Perceptron'>Wikipedia entry for Perceptron</a>
/// </summary>
/// <param name="env">The local instance of the <see cref="IHostEnvironment"/>.</param>
/// <param name="label">The name of the label column.</param>
/// <param name="features">The name of the feature column.</param>
/// <param name="weights">The optional name of the weights column.</param>
/// <param name="lossFunction">The classification loss function; defaults to hinge loss when null.</param>
/// <param name="learningRate">The learning rate.</param>
/// <param name="decreaseLearningRate">Whether to decrease learning rate as iterations progress.</param>
/// <param name="l2RegularizerWeight">L2 regularization weight.</param>
/// <param name="numIterations">The number of training iterations.</param>
/// <param name="advancedSettings">A delegate to supply more advanced arguments to the algorithm.</param>
public AveragedPerceptronTrainer(IHostEnvironment env,
    string label,
    string features,
    string weights = null,
    ISupportClassificationLossFactory lossFunction = null,
    float learningRate = Arguments.AveragedDefaultArgs.LearningRate,
    bool decreaseLearningRate = Arguments.AveragedDefaultArgs.DecreaseLearningRate,
    float l2RegularizerWeight = Arguments.AveragedDefaultArgs.L2RegularizerWeight,
    int numIterations = Arguments.AveragedDefaultArgs.NumIterations,
    Action<Arguments> advancedSettings = null)
    : this(env, new Arguments
    {
        LabelColumn = label,
        FeatureColumn = features,
        InitialWeights = weights,
        LearningRate = learningRate,
        DecreaseLearningRate = decreaseLearningRate,
        L2RegularizerWeight = l2RegularizerWeight,
        NumIterations = numIterations
    })
{
    // Default to hinge loss when the caller does not supply a loss factory.
    lossFunction = lossFunction ?? new HingeLoss.Arguments();
    LossFunction = lossFunction.CreateComponent(env);

    // Let the caller override any of the constructed arguments last,
    // so advanced settings take precedence over the simple parameters above.
    advancedSettings?.Invoke(_args);
}
public void LossHinge()
{
    // Hinge loss with default arguments: zero loss (and zero derivative)
    // once the margin is satisfied, linear penalty otherwise.
    var hinge = new HingeLoss(new HingeLoss.Arguments());

    // Positive examples.
    TestHelper(hinge, 1, 2, 0, 0);
    TestHelper(hinge, 1, 1, 0, 0, false);
    TestHelper(hinge, 1, 0.99, 0.01, 1, false);
    TestHelper(hinge, 1, 0.5, 0.5, 1);

    // Negative examples.
    TestHelper(hinge, 0, 0.5, 1.5, -1);
    TestHelper(hinge, 0, -0.5, 0.5, -1);
    TestHelper(hinge, 0, -1, 0, 0, false);
    TestHelper(hinge, 0, -2, 0, 0);
}