public void LossSquared()
{
    // Exercise SquaredLoss over a handful of (label, output) pairs.
    // Each case carries the expected loss and the expected derivative/update
    // value that TestHelper checks (loss = (label - output)^2; the update
    // column equals 2 * (label - output) for every case below).
    var loss = new SquaredLoss();

    // (label, output, expected loss, expected update)
    (double Label, double Output, double Loss, double Update)[] cases =
    {
        (3, 2, 1, 2),
        (3, 4, 1, -2),
        (3, 5, 4, -4),
        (0, -3, 9, 6),
        (-3, -5, 4, 4),
    };

    foreach (var (label, output, expectedLoss, expectedUpdate) in cases)
        TestHelper(loss, label, output, expectedLoss, expectedUpdate);
}
public void OnlineGradientDescent()
{
    var mlContext = new MLContext(seed: 0);
    var dataFile = GetDataPath(TestDatasets.generatedRegressionDataset.trainFilename);
    var source = new MultiFileSource(dataFile);
    var regression = new RegressionContext(mlContext);

    // Statically-typed reader: column 11 is the label, columns 0-10 are the features.
    var reader = TextLoader.CreateReader(mlContext,
        c => (label: c.LoadFloat(11), features: c.LoadFloat(0, 10)),
        separator: ';', hasHeader: true);

    LinearRegressionModelParameters modelParameters = null;
    var squaredLoss = new SquaredLoss();
    var estimator = reader.MakeNewEstimator()
        .Append(r => (r.label,
            score: regression.Trainers.OnlineGradientDescent(r.label, r.features,
                lossFunction: squaredLoss, onFit: p => modelParameters = p)));
    var pipeline = reader.Append(estimator);

    // The onFit callback must not fire until the pipeline is actually fit.
    Assert.Null(modelParameters);
    var model = pipeline.Fit(source);
    Assert.NotNull(modelParameters);

    // 11 input features, so we ought to have 11 weights.
    VBuffer<float> weights = new VBuffer<float>();
    modelParameters.GetFeatureWeights(ref weights);
    Assert.Equal(11, weights.Length);

    var scored = model.Read(source);
    var metrics = regression.Evaluate(scored, r => r.label, r => r.score, new PoissonLoss());

    // Run a sanity check against a few of the metrics.
    Assert.InRange(metrics.L1, 0, double.PositiveInfinity);
    Assert.InRange(metrics.L2, 0, double.PositiveInfinity);
    Assert.InRange(metrics.Rms, 0, double.PositiveInfinity);
    Assert.Equal(metrics.Rms * metrics.Rms, metrics.L2, 5);
    Assert.InRange(metrics.LossFn, 0, double.PositiveInfinity);
}