/// <summary>
/// Trains a linear 2-class model, round-trips it through a model file, and
/// verifies the reloaded model predicts the expected labels on fresh test points.
/// </summary>
public void SaveAndLoadModel()
{
    // Train a model: class 0 in the top-left quadrant, class 1 in the bottom-right.
    var xgbTrainer = new XGBoost.XGBClassifier();
    int countTrainingPoints = 20;
    entity.XGBArray trainClass1 = Util.GenerateRandom2dPoints(countTrainingPoints / 2, -1.0, 0.0, 0.0, 1.0, 0.0); // Top left quadrant
    entity.XGBArray trainClass2 = Util.GenerateRandom2dPoints(countTrainingPoints / 2, 0.0, 1.0, -1.0, 0.0, 1.0); // Bot right quadrant
    entity.XGBArray train_Class1_Class2 = Util.UnionOfXGBArray(trainClass1, trainClass2);
    xgbTrainer.Fit(train_Class1_Class2.Vectors, train_Class1_Class2.Labels);

    // Save the model, deleting any leftover file from a previous run so the
    // save path is exercised cleanly.
    string fileModel = "MyLinearModel.dat";
    if (System.IO.File.Exists(fileModel))
    {
        System.IO.File.Delete(fileModel);
    }
    xgbTrainer.SaveModelToFile(fileModel);

    // Load the saved model into a fresh classifier instance.
    var xgbProduction = XGBoost.XGBClassifier.LoadClassifierFromFile(fileModel);

    // Test points are drawn from the interior of each training quadrant so the
    // expected class is unambiguous.
    int countTestingPoints = 50;
    entity.XGBArray testClass1 = Util.GenerateRandom2dPoints(countTestingPoints / 2, -0.8, -0.2, 0.2, 0.8, 0.0); // Top left quadrant
    entity.XGBArray testClass2 = Util.GenerateRandom2dPoints(countTestingPoints / 2, 0.2, 0.8, -0.8, -0.2, 1.0); // Bot right quadrant
    entity.XGBArray test_Class1_Class2 = Util.UnionOfXGBArray(testClass1, testClass2);
    var results = xgbProduction.Predict(test_Class1_Class2.Vectors);

    // MSTest convention: expected collection first, actual second, so failure
    // messages read correctly (original had the arguments reversed).
    CollectionAssert.AreEqual(test_Class1_Class2.Labels, results);
}
/// <summary>
/// Trains a 4-quadrant XOR-style model (diagonal quadrants share a label) and
/// verifies predictions on interior test points from each quadrant.
/// </summary>
public void TestMethod1()
{
    var xgb = new XGBoost.XGBClassifier();

    // Generate training vectors: label 1 for the off-diagonal quadrants,
    // label 0 for the on-diagonal quadrants (an XOR-like layout).
    int countTrainingPoints = 50;
    entity.XGBArray trainClass_0_1 = Util.GenerateRandom2dPoints(countTrainingPoints / 2, 0.0, 0.5, 0.5, 1.0, 1.0); // 0,1
    entity.XGBArray trainClass_1_0 = Util.GenerateRandom2dPoints(countTrainingPoints / 2, 0.5, 1.0, 0.0, 0.5, 1.0); // 1,0
    entity.XGBArray trainClass_0_0 = Util.GenerateRandom2dPoints(countTrainingPoints / 2, 0.0, 0.5, 0.0, 0.5, 0.0); // 0,0
    entity.XGBArray trainClass_1_1 = Util.GenerateRandom2dPoints(countTrainingPoints / 2, 0.5, 1.0, 0.5, 1.0, 0.0); // 1,1

    // Train the model on the union of all four quadrants.
    entity.XGBArray allVectorsTraining = Util.UnionOfXGBArrays(trainClass_0_1, trainClass_1_0, trainClass_0_0, trainClass_1_1);
    xgb.Fit(allVectorsTraining.Vectors, allVectorsTraining.Labels);

    // Test the model with points drawn from the interior of each quadrant
    // (0.1..0.4 / 0.6..0.9) to stay clear of the decision boundaries.
    int countTestingPoints = 10;
    entity.XGBArray testClass_0_1 = Util.GenerateRandom2dPoints(countTestingPoints, 0.1, 0.4, 0.6, 0.9, 1.0); // 0,1
    entity.XGBArray testClass_1_0 = Util.GenerateRandom2dPoints(countTestingPoints, 0.6, 0.9, 0.1, 0.4, 1.0); // 1,0
    entity.XGBArray testClass_0_0 = Util.GenerateRandom2dPoints(countTestingPoints, 0.1, 0.4, 0.1, 0.4, 0.0); // 0,0
    entity.XGBArray testClass_1_1 = Util.GenerateRandom2dPoints(countTestingPoints, 0.6, 0.9, 0.6, 0.9, 0.0); // 1,1
    entity.XGBArray allVectorsTest = Util.UnionOfXGBArrays(testClass_0_1, testClass_1_0, testClass_0_0, testClass_1_1);
    var resultsActual = xgb.Predict(allVectorsTest.Vectors);

    // MSTest convention: expected collection first, actual second (original
    // had the arguments reversed, which inverts failure messages).
    CollectionAssert.AreEqual(allVectorsTest.Labels, resultsActual);
}
/// <summary>
/// Trains a 3-class softprob model on the Iris training split, checks every
/// prediction on the test split, and saves the trained model to disk.
/// </summary>
public void TrainAndTestIris()
{
    // Load training vectors.
    string filenameTrain = "Iris\\Iris.train.data";
    iris.Iris[] recordsTrain = IrisUtils.LoadIris(filenameTrain);
    entity.XGVector<iris.Iris>[] vectorsTrain = IrisUtils.ConvertFromIrisToFeatureVectors(recordsTrain);

    // Load testing vectors.
    string filenameTest = "Iris\\Iris.test.data";
    iris.Iris[] recordsTest = IrisUtils.LoadIris(filenameTest);
    entity.XGVector<iris.Iris>[] vectorsTest = IrisUtils.ConvertFromIrisToFeatureVectors(recordsTest);

    // Single source of truth for the class count: use it for both the
    // classifier configuration and the per-sample stride into the
    // flattened probability output (original hard-coded 3 in both places).
    int noOfClasses = 3;
    var xgbc = new XGBoost.XGBClassifier(objective: "multi:softprob", numClass: noOfClasses);
    entity.XGBArray arrTrain = Util.ConvertToXGBArray(vectorsTrain);
    entity.XGBArray arrTest = Util.ConvertToXGBArray(vectorsTest);
    xgbc.Fit(arrTrain.Vectors, arrTrain.Labels);

    // multi:softprob returns one probability per class per sample, flattened;
    // sample i occupies indices [i * noOfClasses, (i + 1) * noOfClasses).
    var outcomeTest = xgbc.Predict(arrTest.Vectors);
    for (int index = 0; index < arrTest.Vectors.Length; index++)
    {
        string sExpected = IrisUtils.ConvertLabelFromNumericToString(arrTest.Labels[index]);

        // Slice out this sample's class probabilities (loop generalizes the
        // original's three hard-coded indices to any noOfClasses).
        float[] arrResults = new float[noOfClasses];
        for (int classIndex = 0; classIndex < noOfClasses; classIndex++)
        {
            arrResults[classIndex] = outcomeTest[index * noOfClasses + classIndex];
        }

        // Predicted class = index of the highest probability.
        int indexWithMaxValue = Util.GetIndexWithMaxValue(arrResults);
        string sActualClass = IrisUtils.ConvertLabelFromNumericToString((float)indexWithMaxValue);
        Trace.WriteLine($"{index} Expected={sExpected} Actual={sActualClass}");

        // MSTest convention: expected first, actual second (original had them
        // reversed). Also removed an unused `max` local from the original.
        Assert.AreEqual(sExpected, sActualClass);
    }

    // Persist the trained model next to the project for later reuse.
    string pathFull = System.IO.Path.Combine(Util.GetProjectDir2(), _fileModelIris);
    xgbc.SaveModelToFile(pathFull);
}
/// <summary>
/// Trains a linearly-separable 2-class model (top-left quadrant vs
/// bottom-right quadrant) and verifies predictions on interior test points.
/// </summary>
public void LinearClassification2()
{
    var xgb = new XGBoost.XGBClassifier();

    // Train: class 0 in the top-left quadrant, class 1 in the bottom-right.
    int countTrainingPoints = 20;
    entity.XGBArray trainClass1 = Util.GenerateRandom2dPoints(countTrainingPoints / 2, -1.0, 0.0, 0.0, 1.0, 0.0); // Top left quadrant
    entity.XGBArray trainClass2 = Util.GenerateRandom2dPoints(countTrainingPoints / 2, 0.0, 1.0, -1.0, 0.0, 1.0); // Bot right quadrant
    entity.XGBArray train_Class1_Class2 = Util.UnionOfXGBArray(trainClass1, trainClass2);
    xgb.Fit(train_Class1_Class2.Vectors, train_Class1_Class2.Labels);

    // Test with points drawn from the interior of each quadrant so the
    // expected label is unambiguous.
    int countTestingPoints = 50;
    entity.XGBArray testClass1 = Util.GenerateRandom2dPoints(countTestingPoints / 2, -0.8, -0.2, 0.2, 0.8, 0.0); // Top left quadrant
    entity.XGBArray testClass2 = Util.GenerateRandom2dPoints(countTestingPoints / 2, 0.2, 0.8, -0.8, -0.2, 1.0); // Bot right quadrant
    entity.XGBArray test_Class1_Class2 = Util.UnionOfXGBArray(testClass1, testClass2);
    var results = xgb.Predict(test_Class1_Class2.Vectors);

    // MSTest convention: expected collection first, actual second (original
    // had the arguments reversed).
    CollectionAssert.AreEqual(test_Class1_Class2.Labels, results);
}
/// <summary>
/// Trains a 2-class model on hand-written points clustered around (0.5, 0.5)
/// and (-0.5, -0.5), then verifies predictions on nearby test points.
/// </summary>
public void LinearClassification1()
{
    var xgb = new XGBoost.XGBClassifier();

    // Training vectors: five points around (0.5, 0.5) labeled 1, five around
    // (-0.5, -0.5) labeled 0.
    float[][] vectorsTrain = new float[][]
    {
        new[] { 0.5f, 0.5f },
        new[] { 0.6f, 0.6f },
        new[] { 0.6f, 0.4f },
        new[] { 0.4f, 0.6f },
        new[] { 0.4f, 0.4f },
        new[] { -0.5f, -0.5f },
        new[] { -0.6f, -0.6f },
        new[] { -0.6f, -0.4f },
        new[] { -0.4f, -0.6f },
        new[] { -0.4f, -0.4f },
    };
    // Fixed typo in the original local name ("lablesTrain").
    var labelsTrain = new[]
    {
        1.0f, 1.0f, 1.0f, 1.0f, 1.0f,
        0.0f, 0.0f, 0.0f, 0.0f, 0.0f,
    };

    // Sanity check: one label per training vector.
    Assert.AreEqual(vectorsTrain.Length, labelsTrain.Length);

    // Train the model.
    xgb.Fit(vectorsTrain, labelsTrain);

    // Test the model using points near (but not identical to) the training
    // clusters.
    float[][] vectorsTest = new float[][]
    {
        new[] { 0.55f, 0.55f },
        new[] { 0.55f, 0.45f },
        new[] { 0.45f, 0.55f },
        new[] { 0.45f, 0.45f },
        new[] { -0.55f, -0.55f },
        new[] { -0.55f, -0.45f },
        new[] { -0.45f, -0.55f },
        new[] { -0.45f, -0.45f },
    };
    var labelsTestExpected = new[]
    {
        1.0f, 1.0f, 1.0f, 1.0f,
        0.0f, 0.0f, 0.0f, 0.0f,
    };
    float[] labelsTestPredicted = xgb.Predict(vectorsTest);

    // MSTest convention: expected collection first, actual second (original
    // had the arguments reversed).
    CollectionAssert.AreEqual(labelsTestExpected, labelsTestPredicted);
}