private static ITransformer BuildTrainEvaluateAndSaveModel(MLContext mlContext)
        {
            // STEP 1: Common data loading configuration
            var textLoader       = SentimentAnalysysTextLoaderFactory.CreateTextLoader(mlContext);
            var trainingDataView = textLoader.Read(TrainDataPath);
            var testDataView     = textLoader.Read(TestDataPath);

            // STEP 2: Common data process configuration with pipeline data transformations
            var dataProcessPipeline = mlContext.Transforms.Text.FeaturizeText("Text", "Features");

            // STEP 3: Set the training algorithm, then create and configure the modelBuilder
            var modelBuilder = new Common.ModelBuilder <SentimentIssue, SentimentPrediction>(mlContext, dataProcessPipeline);
            var trainer      = mlContext.BinaryClassification.Trainers.FastTree(label: "Label", features: "Features");

            modelBuilder.AddTrainer(trainer);

            // STEP 4: Train the model, fitting it to the training dataset
            Console.WriteLine("=============== Training the model ===============");
            modelBuilder.Train(trainingDataView);

            // STEP 5: Evaluate the model and show accuracy stats
            Console.WriteLine("===== Evaluating Model's accuracy with Test data =====");
            var metrics = modelBuilder.EvaluateBinaryClassificationModel(testDataView, "Label", "Score");

            Common.ConsoleHelper.PrintBinaryClassificationMetrics(trainer.ToString(), metrics);

            // STEP 6: Save/persist the trained model to a .ZIP file
            Console.WriteLine("=============== Saving the model to a file ===============");
            modelBuilder.SaveModelAsFile(ModelPath);

            return modelBuilder.TrainedModel;
        }
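Common.ModelBuilder is a helper shared by these examples, and its implementation is not shown in this listing. The following is only a minimal sketch, assuming AddTrainer/Train simply append the trainer to the shared data-process pipeline and fit the resulting estimator chain on the training DataView (the method name TrainSketch and the IEstimator<ITransformer> parameter types are illustrative assumptions):

        // Minimal sketch (assumption) of what Common.ModelBuilder.AddTrainer + Train likely wrap.
        private static ITransformer TrainSketch(IEstimator<ITransformer> dataProcessPipeline,
                                                IEstimator<ITransformer> trainer,
                                                IDataView trainingDataView)
        {
            // Append the trainer to the data-process pipeline to form the full training pipeline...
            var trainingPipeline = dataProcessPipeline.Append(trainer);

            // ...then fit it on the training data; the fitted chain is the trained model (ITransformer).
            return trainingPipeline.Fit(trainingDataView);
        }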
        private static void BuildTrainEvaluateAndSaveModel(MLContext mlContext)
        {
            // STEP 1: Common data loading configuration
            var textLoader       = IrisTextLoaderFactory.CreateTextLoader(mlContext);
            var trainingDataView = textLoader.Read(TrainDataPath);
            var testDataView     = textLoader.Read(TestDataPath);

            // STEP 2: Common data process configuration with pipeline data transformations
            var dataProcessPipeline = mlContext.Transforms.Concatenate("Features", "SepalLength",
                                                                       "SepalWidth",
                                                                       "PetalLength",
                                                                       "PetalWidth");

            // STEP 3: Set the training algorithm, then create and configure the modelBuilder
            var modelBuilder = new Common.ModelBuilder <IrisData, IrisPrediction>(mlContext, dataProcessPipeline);
            // We apply our selected Trainer
            var trainer = mlContext.MulticlassClassification.Trainers.StochasticDualCoordinateAscent(labelColumn: "Label", featureColumn: "Features");

            modelBuilder.AddTrainer(trainer);

            // STEP 4: Train the model, fitting it to the training dataset
            //The pipeline is trained on the dataset that has been loaded and transformed.
            Console.WriteLine("=============== Training the model ===============");
            modelBuilder.Train(trainingDataView);

            // STEP 5: Evaluate the model and show accuracy stats
            Console.WriteLine("===== Evaluating Model's accuracy with Test data =====");
            var metrics = modelBuilder.EvaluateMultiClassClassificationModel(testDataView, "Label");

            Common.ConsoleHelper.PrintMultiClassClassificationMetrics(trainer.ToString(), metrics);

            // STEP 6: Save/persist the trained model to a .ZIP file
            Console.WriteLine("=============== Saving the model to a file ===============");
            modelBuilder.SaveModelAsFile(ModelPath);
        }
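The IrisData and IrisPrediction classes are not shown in this listing. A plausible shape is sketched below, assuming the classic iris columns referenced in the Concatenate call above (everything beyond the four Sepal/Petal column names is an assumption, and the real classes may map columns via loader attributes):

        public class IrisData
        {
            public float Label;
            public float SepalLength;
            public float SepalWidth;
            public float PetalLength;
            public float PetalWidth;
        }

        public class IrisPrediction
        {
            // One score per class, as produced by the multiclass trainer.
            public float[] Score;
        }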
Example #3
        static void Main(string[] args)
        {
            //Create MLContext to be shared across the model creation workflow objects
            //Set a fixed seed for repeatable/deterministic results across multiple trainings.
            var mlContext = new MLContext(seed: 1);

            // STEP 1: Common data loading configuration
            DataLoader dataLoader       = new DataLoader(mlContext);
            var        trainingDataView = dataLoader.GetDataView(TrainDataPath);
            var        testDataView     = dataLoader.GetDataView(TestDataPath);

            // STEP 2: Common data process configuration with pipeline data transformations
            var dataProcessor       = new DataProcessor(mlContext);
            var dataProcessPipeline = dataProcessor.DataProcessPipeline;

            // (OPTIONAL) Peek data (such as 2 records) in training DataView after applying the ProcessPipeline's transformations into "Features"
            Common.ConsoleHelper.PeekDataViewInConsole <SentimentIssue>(mlContext, trainingDataView, dataProcessPipeline, 2);
            //Common.ConsoleHelper.PeekVectorColumnDataInConsole(mlContext, "Features", trainingDataView, dataProcessPipeline, 2);

            // STEP 3: Set the training algorithm, then create and configure the modelBuilder
            var modelBuilder = new Common.ModelBuilder <SentimentIssue, SentimentPrediction>(mlContext, dataProcessPipeline);
            var trainer      = mlContext.BinaryClassification.Trainers.StochasticDualCoordinateAscent(label: "Label", features: "Features");

            //Other way: var trainer = new LinearClassificationTrainer(mlContext, "Features", "Label");
            modelBuilder.AddTrainer(trainer);

            // STEP 4: Train the model, fitting it to the training dataset
            Console.WriteLine("=============== Training the model ===============");
            modelBuilder.Train(trainingDataView);

            // STEP 5: Evaluate the model and show accuracy stats
            Console.WriteLine("===== Evaluating Model's accuracy with Test data =====");
            var metrics = modelBuilder.EvaluateBinaryClassificationModel(testDataView, "Label", "Score");

            Common.ConsoleHelper.PrintBinaryClassificationMetrics("StochasticDualCoordinateAscent", metrics);

            // STEP 6: Save/persist the trained model to a .ZIP file
            Console.WriteLine("=============== Saving the model to a file ===============");
            modelBuilder.SaveModelAsFile(ModelPath);

            // (OPTIONAL) Try/test a single prediction by loading the model from the .ZIP file first.
            SentimentIssue sampleStatement = new SentimentIssue {
                Text = "This is a very rude movie"
            };
            var modelScorer = new Common.ModelScorer <SentimentIssue, SentimentPrediction>(mlContext);

            modelScorer.LoadModelFromZipFile(ModelPath);
            var resultprediction = modelScorer.PredictSingle(sampleStatement);

            Console.WriteLine($"=============== Single Prediction  ===============");
            Console.WriteLine($"Text: {sampleStatement.Text} | Prediction: {(Convert.ToBoolean(resultprediction.Prediction) ? "Toxic" : "Nice")} sentiment | Probability: {resultprediction.Probability} ");
            Console.WriteLine($"==================================================");
            //
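            // (Hypothetical extra check, not part of the original sample) The already-loaded
            // modelScorer can be reused for further ad-hoc statements in the same way:
            var secondSample = new SentimentIssue { Text = "I really enjoyed this discussion, thanks" };
            var secondPrediction = modelScorer.PredictSingle(secondSample);
            Console.WriteLine($"Text: {secondSample.Text} | Prediction: {(Convert.ToBoolean(secondPrediction.Prediction) ? "Toxic" : "Nice")} sentiment | Probability: {secondPrediction.Probability}");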

            Common.ConsoleHelper.ConsoleWriteHeader("=============== End of training process, hit any key to finish ===============");
            Console.ReadKey();
        }
        static void Main(string[] args)
        {
            var assetsPath = ModelHelpers.GetAssetsPath(@"..\..\..\assets");

            var transactionsCsv = Path.Combine(assetsPath, "inputs", "transactions.csv");
            var offersCsv       = Path.Combine(assetsPath, "inputs", "offers.csv");
            var pivotCsv        = Path.Combine(assetsPath, "inputs", "pivot.csv");
            var modelZip        = Path.Combine(assetsPath, "outputs", "retailClustering.zip");

            try
            {
                //DataHelpers.PreProcessAndSave(offersCsv, transactionsCsv, pivotCsv);
                //var modelBuilder = new ModelBuilder(pivotCsv, modelZip, kValuesSvg);
                //modelBuilder.BuildAndTrain();

                //STEP 0: Sample-specific data pre-processing: create the pivot-table CSV file
                DataHelpers.PreProcessAndSave(offersCsv, transactionsCsv, pivotCsv);

                //Create the MLContext to share across components for deterministic results
                MLContext mlContext = new MLContext(seed: 1);  //Fixed seed so you get a deterministic environment across runs

                //STEP 1: Common data loading
                DataLoader dataLoader    = new DataLoader(mlContext);
                var        pivotDataView = dataLoader.GetDataView(pivotCsv);

                //STEP 2: Process data transformations in pipeline
                var dataProcessor       = new DataProcessor(mlContext, 2);
                var dataProcessPipeline = dataProcessor.DataProcessPipeline;

                // (Optional) Peek data in training DataView after applying the ProcessPipeline's transformations
                Common.ConsoleHelper.PeekDataViewInConsole <PivotObservation>(mlContext, pivotDataView, dataProcessPipeline, 10);
                Common.ConsoleHelper.PeekVectorColumnDataInConsole(mlContext, "Features", pivotDataView, dataProcessPipeline, 10);

                // STEP 3: Create and train the model
                var trainer      = mlContext.Clustering.Trainers.KMeans("Features", clustersCount: 3);
                var modelBuilder = new Common.ModelBuilder <PivotObservation, ClusteringPrediction>(mlContext, dataProcessPipeline);
                modelBuilder.AddTrainer(trainer);
                var trainedModel = modelBuilder.Train(pivotDataView);

                // STEP 4: Evaluate the accuracy of the model
                var metrics = modelBuilder.EvaluateClusteringModel(pivotDataView);
                Common.ConsoleHelper.PrintClusteringMetrics("KMeans", metrics);

                // STEP 5: Save/persist the model as a .ZIP file
                modelBuilder.SaveModelAsFile(modelZip);
            }
            catch (Exception ex)
            {
                Common.ConsoleHelper.ConsoleWriteException(ex.Message);
            }

            Common.ConsoleHelper.ConsolePressAnyKey();
        }
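PivotObservation and ClusteringPrediction are defined elsewhere in the sample. A plausible shape is sketched below, assuming the pivot file carries one numeric vector of per-offer purchase counts plus the customer's last name (all field names here are assumptions, and the real classes likely map columns via attributes):

        public class PivotObservation
        {
            public string LastName;
            // Pivoted per-offer purchase counts, turned into the "Features" column by the data-process pipeline.
            public float[] Features;
        }

        public class ClusteringPrediction
        {
            // Cluster assigned by KMeans (commonly bound to the "PredictedLabel" column).
            public uint SelectedClusterId;
            // Squared distance to each cluster centroid.
            public float[] Distance;
        }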
Example #5
        static void Main(string[] args)
        {
            var assetsPath = ModelHelpers.GetAssetsPath(@"..\..\..\assets");

            var transactionsCsv = Path.Combine(assetsPath, "inputs", "transactions.csv");
            var offersCsv       = Path.Combine(assetsPath, "inputs", "offers.csv");
            var pivotCsv        = Path.Combine(assetsPath, "inputs", "pivot.csv");
            var modelZip        = Path.Combine(assetsPath, "outputs", "retailClustering.zip");

            try
            {
                //STEP 0: Sample-specific data pre-processing: create the pivot-table CSV file
                DataHelpers.PreProcessAndSave(offersCsv, transactionsCsv, pivotCsv);

                //Create the MLContext to share across components for deterministic results
                MLContext mlContext = new MLContext(seed: 1);  //Fixed seed so you get a deterministic environment across runs

                // STEP 1: Common data loading configuration
                var textLoader    = CustomerSegmentationTextLoaderFactory.CreateTextLoader(mlContext);
                var pivotDataView = textLoader.Read(pivotCsv);

                //STEP 2: Configure data transformations in pipeline
                var dataProcessPipeline = new PrincipalComponentAnalysisEstimator(mlContext, "Features", "PCAFeatures", rank: 2)
                                          .Append(new OneHotEncodingEstimator(mlContext, new[] { new OneHotEncodingEstimator.ColumnInfo("LastName",
                                                                                                                                        "LastNameKey",
                                                                                                                                        CategoricalTransform.OutputKind.Ind) }));
                // (Optional) Peek data in training DataView after applying the ProcessPipeline's transformations
                Common.ConsoleHelper.PeekDataViewInConsole <PivotObservation>(mlContext, pivotDataView, dataProcessPipeline, 10);
                Common.ConsoleHelper.PeekVectorColumnDataInConsole(mlContext, "Features", pivotDataView, dataProcessPipeline, 10);

                // STEP 3: Create and train the model
                var trainer      = mlContext.Clustering.Trainers.KMeans("Features", clustersCount: 3);
                var modelBuilder = new Common.ModelBuilder <PivotObservation, ClusteringPrediction>(mlContext, dataProcessPipeline);
                modelBuilder.AddTrainer(trainer);
                var trainedModel = modelBuilder.Train(pivotDataView);

                // STEP 4: Evaluate the accuracy of the model
                var metrics = modelBuilder.EvaluateClusteringModel(pivotDataView);
                Common.ConsoleHelper.PrintClusteringMetrics(trainer.ToString(), metrics);

                // STEP 5: Save/persist the model as a .ZIP file
                modelBuilder.SaveModelAsFile(modelZip);
            }
            catch (Exception ex)
            {
                Common.ConsoleHelper.ConsoleWriteException(ex.Message);
            }

            Common.ConsoleHelper.ConsolePressAnyKey();
        }
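For completeness, here is a hedged sketch of how the saved clustering model could be consumed afterwards, following the same Common.ModelScorer pattern used in the sentiment example above (the helper name ScoreOnePivotRow, the ClusteringPrediction.SelectedClusterId field, and how a PivotObservation is populated are all assumptions):

        private static void ScoreOnePivotRow(MLContext mlContext, string modelZipPath, PivotObservation observation)
        {
            // Load the persisted clustering model and run a single in-memory prediction.
            var modelScorer = new Common.ModelScorer<PivotObservation, ClusteringPrediction>(mlContext);
            modelScorer.LoadModelFromZipFile(modelZipPath);

            var clusterPrediction = modelScorer.PredictSingle(observation);
            Console.WriteLine($"Assigned cluster: {clusterPrediction.SelectedClusterId}");
        }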
Example #6
        private static ITransformer BuildTrainEvaluateAndSaveModel(MLContext mlContext)
        {
            // STEP 1: Common data loading configuration
            DataLoader dataLoader       = new DataLoader(mlContext);
            var        trainingDataView = dataLoader.GetDataView(TrainDataPath);
            var        testDataView     = dataLoader.GetDataView(TestDataPath);

            // STEP 2: Common data process configuration with pipeline data transformations
            var dataProcessor       = new DataProcessor(mlContext);
            var dataProcessPipeline = dataProcessor.DataProcessPipeline;

            // (OPTIONAL) Peek data (such as 5 records) in training DataView after applying the ProcessPipeline's transformations into "Features"
            Common.ConsoleHelper.PeekDataViewInConsole <TaxiTrip>(mlContext, trainingDataView, dataProcessPipeline, 5);
            Common.ConsoleHelper.PeekVectorColumnDataInConsole(mlContext, "Features", trainingDataView, dataProcessPipeline, 5);

            // STEP 3: Set the training algorithm, then create and configure the modelBuilder
            var modelBuilder = new Common.ModelBuilder <TaxiTrip, TaxiTripFarePrediction>(mlContext, dataProcessPipeline);
            // We apply our selected Trainer (SDCA Regression algorithm)
            var trainer = mlContext.Regression.Trainers.StochasticDualCoordinateAscent(label: "Label", features: "Features");

            modelBuilder.AddTrainer(trainer);

            // STEP 4: Train the model, fitting it to the training dataset
            //The pipeline is trained on the dataset that has been loaded and transformed.
            Console.WriteLine("=============== Training the model ===============");
            modelBuilder.Train(trainingDataView);

            // STEP 5: Evaluate the model and show accuracy stats
            Console.WriteLine("===== Evaluating Model's accuracy with Test data =====");
            var metrics = modelBuilder.EvaluateRegressionModel(testDataView, "Label", "Score");

            Common.ConsoleHelper.PrintRegressionMetrics("StochasticDualCoordinateAscent", metrics);

            // STEP 6: Save/persist the trained model to a .ZIP file
            Console.WriteLine("=============== Saving the model to a file ===============");
            modelBuilder.SaveModelAsFile(ModelPath);

            return modelBuilder.TrainedModel;
        }
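TaxiTrip and TaxiTripFarePrediction are not shown in this listing. A plausible shape is sketched below, assuming the usual NYC taxi-fare columns (all field names are assumptions, and the real prediction class probably binds its output to the "Score" column via an attribute):

        public class TaxiTrip
        {
            public string VendorId;
            public string RateCode;
            public float PassengerCount;
            public float TripTime;
            public float TripDistance;
            public string PaymentType;
            // Regression label: the actual fare paid.
            public float FareAmount;
        }

        public class TaxiTripFarePrediction
        {
            // Predicted fare; in the real class this is typically bound to the "Score" column.
            public float FareAmount;
        }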
Example #7
        public static void CreateTrainAndEvaluateModel(MLContext mlContext)
        {
            // STEP 1: Common data loading configuration
            DataLoader dataLoader       = new DataLoader(mlContext);
            var        trainingDataView = dataLoader.GetDataView(TrainDataPath);
            var        testDataView     = dataLoader.GetDataView(TestDataPath);

            // STEP 2: Common data process configuration with pipeline data transformations
            var dataProcessor       = new DataProcessor(mlContext);
            var dataProcessPipeline = dataProcessor.DataProcessPipeline;

            // (OPTIONAL) Peek data (such as 2 records) in training DataView after applying the ProcessPipeline's transformations into "Features"
            Common.ConsoleHelper.PeekDataViewInConsole <TitanicData>(mlContext, trainingDataView, dataProcessPipeline, 2);
            Common.ConsoleHelper.PeekVectorColumnDataInConsole(mlContext, "Features", trainingDataView, dataProcessPipeline, 2);

            // STEP 3: Set the training algorithm, then create and configure the modelBuilder
            // FastTree is the binary-classification algorithm used to train this model.
            // Three of its hyperparameters (number of leaves, number of trees, minimum data points per leaf) are set below to tune decision-tree performance.
            // (Legacy LearningPipeline equivalent: pipeline.Add(new FastTreeBinaryClassifier { NumLeaves = 5, NumTrees = 5, MinDocumentsInLeafs = 2 });)
            var modelBuilder = new Common.ModelBuilder <TitanicData, TitanicPrediction>(mlContext, dataProcessPipeline);
            var trainer      = mlContext.BinaryClassification.Trainers.FastTree(label: "Label", features: "Features", numLeaves: 10, numTrees: 5, minDatapointsInLeafs: 10);

            modelBuilder.AddTrainer(trainer);

            // STEP 4: Train the model, fitting it to the training dataset
            Console.WriteLine("=============== Training the model ===============");
            modelBuilder.Train(trainingDataView);

            // STEP 5: Evaluate the model and show accuracy stats
            Console.WriteLine("===== Evaluating Model's accuracy with Test data =====");
            var metrics = modelBuilder.EvaluateBinaryClassificationModel(testDataView, "Label", "Score");

            Common.ConsoleHelper.PrintBinaryClassificationMetrics(trainer.ToString(), metrics);

            // STEP 6: Save/persist the trained model to a .ZIP file
            Console.WriteLine("=============== Saving the model to a file ===============");
            modelBuilder.SaveModelAsFile(ModelPath);
        }
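TitanicData and TitanicPrediction are defined elsewhere. A plausible shape is sketched below, assuming the typical Titanic passenger columns (all field names are assumptions):

        public class TitanicData
        {
            // Binary label: whether the passenger survived.
            public bool Label;
            public float Pclass;
            public string Sex;
            public float Age;
            public float SibSp;
            public float Parch;
            public float Fare;
            public string Embarked;
        }

        public class TitanicPrediction
        {
            // Commonly bound to the "PredictedLabel" column.
            public bool Prediction;
            public float Probability;
            public float Score;
        }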
Example #8
        private static ITransformer BuildTrainEvaluateAndSaveModel(MLContext mlContext)
        {
            // STEP 1: Common data loading configuration
            DataLoader dataLoader       = new DataLoader(mlContext);
            var        trainingDataView = dataLoader.GetDataView(TrainDataPath);
            var        testDataView     = dataLoader.GetDataView(TestDataPath);

            // STEP 2: Common data process configuration with pipeline data transformations
            var dataProcessor       = new DataProcessor(mlContext);
            var dataProcessPipeline = dataProcessor.DataProcessPipeline;

            // (OPTIONAL) Peek data (such as 2 records) in training DataView after applying the ProcessPipeline's transformations into "Features"
            Common.ConsoleHelper.PeekDataViewInConsole <SentimentIssue>(mlContext, trainingDataView, dataProcessPipeline, 2);
            //Common.ConsoleHelper.PeekVectorColumnDataInConsole(mlContext, "Features", trainingDataView, dataProcessPipeline, 2);

            // STEP 3: Set the training algorithm, then create and configure the modelBuilder
            var modelBuilder = new Common.ModelBuilder <SentimentIssue, SentimentPrediction>(mlContext, dataProcessPipeline);
            var trainer      = mlContext.BinaryClassification.Trainers.FastTree(label: "Label", features: "Features");

            modelBuilder.AddTrainer(trainer);

            // STEP 4: Train the model, fitting it to the training dataset
            Console.WriteLine("=============== Training the model ===============");
            modelBuilder.Train(trainingDataView);

            // STEP 5: Evaluate the model and show accuracy stats
            Console.WriteLine("===== Evaluating Model's accuracy with Test data =====");
            var metrics = modelBuilder.EvaluateBinaryClassificationModel(testDataView, "Label", "Score");

            Common.ConsoleHelper.PrintBinaryClassificationMetrics(trainer.ToString(), metrics);

            // STEP 6: Save/persist the trained model to a .ZIP file
            Console.WriteLine("=============== Saving the model to a file ===============");
            modelBuilder.SaveModelAsFile(ModelPath);

            return modelBuilder.TrainedModel;
        }
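Similarly, Common.ModelBuilder.EvaluateBinaryClassificationModel is not shown in this listing. A minimal sketch follows, assuming it scores the test set with the trained model and then calls the built-in binary-classification evaluator (the helper name EvaluateSketch is an assumption):

        private static void EvaluateSketch(MLContext mlContext, ITransformer trainedModel, IDataView testDataView)
        {
            // Score the held-out test set with the already-trained model...
            var predictions = trainedModel.Transform(testDataView);

            // ...then compute accuracy/AUC/F1 with the built-in evaluator, naming the label and score columns.
            var metrics = mlContext.BinaryClassification.Evaluate(predictions, "Label", "Score");
            Console.WriteLine($"Accuracy: {metrics.Accuracy}");
        }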