// Verifies that regularized linear regression recovers a known linear target
// function well enough that mean cross-validated goodness-of-fit is >= 0.9.
// NOTE(review): method name typo "Artificaial" kept — renaming would change the
// test's public identifier; fix together with any test-listing references.
public void RegularizedLinearRegression_ArtificaialFunction()
{
    // Given: 1000 random rows following 0.3 + 0.5*x1 - 0.3*x2 + 0.7*x3.
    Func<IList<double>, double> targetFunction =
        features => 0.3 + (0.5 * features[0]) + (-0.3 * features[1]) + (0.7 * features[2]);
    var dataFrame = TestDataBuilder.BuildRandomAbstractNumericDataFrame(
        targetFunction,
        featuresCount: 3,
        min: 0,
        max: 1,
        rowCount: 1000);
    var modelBuilder = new RegularizedLinearRegressionModelBuilder(0.5);
    var trainingParams = new LinearRegressionParams(0.05);
    var validator = new CrossValidator<double>();

    // When: 20-fold cross-validation, 80% of rows used for training each fold.
    // NOTE(review): "percetnagOfTrainData" mirrors the misspelled parameter
    // name on CrossValidate itself; it must match that external declaration.
    var foldResults = validator.CrossValidate(
        modelBuilder: modelBuilder,
        modelBuilderParams: trainingParams,
        predictor: new LinearRegressionPredictor(),
        qualityMeasure: new GoodnessOfFitQualityMeasure(),
        dataFrame: dataFrame,
        dependentFeatureName: "result",
        percetnagOfTrainData: 0.8,
        folds: 20);

    // Then: average accuracy across all folds clears the 0.9 threshold.
    var meanAccuracy = foldResults.Select(result => result.Accuracy).Average();
    Assert.IsTrue(meanAccuracy >= 0.9);
}
// Verifies that the regression-and-model leaf builder, driven by gradient
// descent, recovers the coefficients of a known linear generating equation.
public void TestRegressionAndModelLeafBuilder_RandomizedData()
{
    // Given: 100000 random rows generated from 3 + 2*x1 + 3*x2 + 4*x3.
    Func<IList<double>, double> generatingEquation =
        features => 3 + (features[0] * 2) + (features[1] * 3) + (features[2] * 4);
    var trainingData = TestDataBuilder.BuildRandomAbstractNumericDataFrame(
        generatingEquation, 100000, 3, min: 1, max: 100);
    var subject = new RegressionAndModelDecisionTreeLeafBuilder(
        new GradientDescentModelBuilder(-5, 5, iterCount: 3000));

    // When: a single leaf model is fit against the "result" column.
    // NOTE(review): `as` cast with no null guard — a non-leaf return would
    // surface as a NullReferenceException below; kept to preserve behavior.
    var leaf = subject.BuildLeaf(trainingData, "result") as IDecisionTreeRegressionAndModelLeaf;

    // Then: fitted weights match the generating equation within 0.01.
    Assert.AreEqual(3.0, leaf.ModelWeights[0], 0.01); // intercept
    Assert.AreEqual(2.0, leaf.ModelWeights[1], 0.01); // x1 coefficient
    Assert.AreEqual(3.0, leaf.ModelWeights[2], 0.01); // x2 coefficient
    Assert.AreEqual(4.0, leaf.ModelWeights[3], 0.01); // x3 coefficient
}