/// <summary>
/// Converts the rows of <paramref name="source"/> to objects, applies the given
/// transformer to each one, and returns the serialized result.
/// </summary>
/// <param name="source">Table whose rows are converted and serialized.</param>
/// <param name="transformer">Per-object transform; null means no transformation is applied.</param>
/// <returns>The serialized form of the transformed row objects.</returns>
public string Convert(DataTable source, ITransformer transformer)
{
    // A null transformer degrades to the identity transformation.
    transformer = transformer ?? new NoTransformation();

    var rowObjects = DataTableToObjectsConverter.Convert(source);
    var transformed = rowObjects.Select(transformer.Transform).ToArray();

    return serializer.Serialize(transformed);
}
/// <summary>
/// Translates a code of asset written on TypeScript to JS-code
/// </summary>
/// <param name="asset">Asset</param>
/// <param name="transformer">Transformer</param>
/// <param name="isDebugMode">Flag that web application is in debug mode</param>
/// <returns>Translated asset</returns>
protected override IAsset TranslateAsset(IAsset asset, ITransformer transformer, bool isDebugMode)
{
    // Delegate to the shared helper, keyed by the TypeScript translator's registered name.
    return InnerTranslateAsset<TypeScriptTranslator>(
        Constants.TranslatorName.TypeScriptTranslator, asset, transformer, isDebugMode);
}
/// <summary>
/// Translates a code of asset written on EcmaScript2015 to JS-code
/// </summary>
/// <param name="asset">Asset</param>
/// <param name="transformer">Transformer</param>
/// <param name="isDebugMode">Flag that web application is in debug mode</param>
/// <returns>Translated asset</returns>
protected override IAsset TranslateAsset(IAsset asset, ITransformer transformer, bool isDebugMode)
{
    // Delegate to the shared helper, identified by the EcmaScript2015 asset type code.
    return InnerTranslateAsset<BabelJsTranslator>(
        AssetTypeCode.EcmaScript2015, asset, transformer, isDebugMode);
}
/// <summary>
/// Creates the controller, storing the injected collaborators for later use.
/// </summary>
/// <param name="userRegisterer">Service that registers users.</param>
/// <param name="authTokenGenerator">Service that generates auth tokens.</param>
/// <param name="userRetriever">Service that retrieves users.</param>
/// <param name="transformer">General-purpose transformer dependency.</param>
public UsersController(IUserRegisterer userRegisterer, IAuthTokenGenerator authTokenGenerator, IUserRetriever userRetriever, ITransformer transformer)
{
    // Plain field capture; assignments are independent of one another.
    this.transformer = transformer;
    this.userRetriever = userRetriever;
    this.authTokenGenerator = authTokenGenerator;
    this.userRegisterer = userRegisterer;
}
/// <summary>
/// Monitor for the state of the request-resolving worker thread. (translated from Romanian)
/// </summary>
//private ManualResetEvent tcpClientConnected;
/// <summary>
/// Starts a blocking TCP accept loop on the given port: polls for pending connections once
/// per second and hands each accepted client to the thread pool for processing.
/// NOTE(review): this constructor never returns on its own — it exits only via
/// ThreadAbortException — so confirm callers run it on a dedicated, abortable thread.
/// </summary>
/// <param name="iTransformer">The transformer that turns a request into a response.</param>
/// <param name="port">The port to listen on.</param>
public Replyer(ITransformer iTransformer, int port)
{
    this.iTransformer = iTransformer;
    TcpListener tcpl = new TcpListener(IPAddress.Any, port);
    tcpl.Start();
    while (true)
    {
        try
        {
            // Poll instead of blocking inside Accept so a thread abort is observed promptly.
            while (!tcpl.Pending())
            {
                Thread.Sleep(1000);
            }
            TcpClient client = tcpl.AcceptTcpClient();
            // Each client is served on a pool thread; this loop only accepts connections.
            ThreadPool.QueueUserWorkItem(receiveTransformAndSendFeedback, client);
        }
        catch (ThreadAbortException)
        {
            // Shutdown path: stop listening and leave the loop.
            tcpl.Stop();
            return;
        }
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="AggregatedTransformer"/> class.
/// </summary>
/// <param name="children">The child transforms; an explicit null argument is treated as an empty set.</param>
protected AggregatedTransformer(params ITransformer[] children)
{
    // Callers can pass null for the params array; coalesce to an empty array.
    transforms = children ?? new ITransformer[] { };
}
/// <summary>
/// Wraps a transformer at a relative position and derives a display name of the form
/// "Name", "Name +n" or "Name -n".
/// </summary>
/// <param name="positionOffset">Relative position; 0 yields no suffix in the name.</param>
/// <param name="transformer">The wrapped transformer.</param>
public At(int positionOffset, ITransformer transformer)
{
    this.offset = positionOffset;
    this.transformer = transformer;

    // Render the offset as "+n"/"-n"; an offset of zero contributes nothing, and the
    // trailing Trim() removes the separator space in that case.
    string suffix = offset == 0 ? "" : (offset > 0 ? "+" : "-") + Math.Abs(offset);
    this.name = string.Format("{0} {1}", transformer.Name, suffix).Trim();
}
/// <summary>
/// Converts the rows of <paramref name="source"/> to objects, applies the given
/// transformer to each one, and serializes the result into <paramref name="writer"/>.
/// </summary>
/// <param name="source">Table whose rows are converted and serialized.</param>
/// <param name="transformer">Per-object transform; null means no transformation is applied.</param>
/// <param name="writer">Destination for the serialized output.</param>
public void Convert(DataTable source, ITransformer transformer, StreamWriter writer)
{
    // A null transformer degrades to the identity transformation.
    transformer = transformer ?? new NoTransformation();

    var rowObjects = DataTableToObjectsConverter.Convert(source);
    var transformed = rowObjects.Select(transformer.Transform).ToArray();

    serializer.Serialize(transformed, writer);
}
/// <summary>
/// Initializes a new instance of the <see cref="PredicatedTransformer"/> class.
/// </summary>
/// <param name="test">The predicate deciding which transform is applied; required.</param>
/// <param name="passesTransform">Transform used when the predicate passes; required.</param>
/// <param name="failsTransform">Transform used when the predicate fails; required.</param>
public PredicatedTransformer(Predicate<Int32> test, ITransformer passesTransform, ITransformer failsTransform) { Helpers.Arguments.NotNull(test, "test"); Helpers.Arguments.NotNull(passesTransform, "passesTransform"); Helpers.Arguments.NotNull(failsTransform, "failsTransform"); _Test = test; _PassesTransform = passesTransform;
    // NOTE(review): the field name "_FailsTrasnform" is misspelled (Trasnform); it is declared
    // elsewhere in this class, so fixing it means renaming the field and all its usages.
    _FailsTrasnform = failsTransform; }
/// <summary>
/// Creates the controller, storing the injected collaborators for later use.
/// </summary>
public PlayedGamesController(
    IPlayedGameRetriever playedGameRetriever,
    IExcelGenerator excelGenerator,
    IPlayedGameSaver playedGameSaver,
    IPlayedGameDeleter playedGameDeleter,
    ITransformer transformer)
{
    // Plain field capture; assignments are independent of one another.
    this.transformer = transformer;
    this.playedGameDeleter = playedGameDeleter;
    this.playedGameSaver = playedGameSaver;
    this.excelGenerator = excelGenerator;
    this.playedGameRetriever = playedGameRetriever;
}
/// <summary>
/// Initializes the parser with the expected column count, the policy for columns beyond
/// that count, and the transformer used to explode a raw row into columns.
/// </summary>
/// <param name="numberOfColumns">Number of columns each row is expected to contain.</param>
/// <param name="additionalColumnsProcessing">How columns beyond <paramref name="numberOfColumns"/> are handled.</param>
/// <param name="columnExploder">Transformer that splits a raw row into columns; required.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="columnExploder"/> is null.</exception>
public RowParser(int numberOfColumns, AdditionalColumnsProcessing additionalColumnsProcessing, ITransformer columnExploder)
{
    if (columnExploder == null)
    {
        // nameof keeps the reported parameter name correct under rename refactorings
        // (previously a hard-coded "columnExploder" string).
        throw new ArgumentNullException(nameof(columnExploder));
    }

    _numberOfColumns = numberOfColumns;
    _additionalColumnsProcessing = additionalColumnsProcessing;
    _columnExploder = columnExploder;
}
/// <summary>
/// Creates the home controller, storing the injected collaborators for later use.
/// NOTE(review): the <paramref name="dataContext"/> parameter is accepted but never stored
/// or used in this constructor — either assign it to a field or remove the parameter; confirm intent.
/// NOTE(review): "Retreiver" in IRecentPlayerAchievementsUnlockedRetreiver is misspelled at the
/// type level; renaming requires a project-wide refactor.
/// </summary>
public HomeController( IRecentPublicGamesRetriever recentPublicGamesRetriever, ITopGamingGroupsRetriever topGamingGroupsRetriever, ITrendingGamesRetriever trendingGamesRetriever, ITransformer transformer, IRecentPlayerAchievementsUnlockedRetreiver recentPlayerAchievementsUnlockedRetreiver, IMapperFactory mapperFactory, IDataContext dataContext) { _recentPublicGamesRetriever = recentPublicGamesRetriever; _topGamingGroupsRetriever = topGamingGroupsRetriever; _trendingGamesRetriever = trendingGamesRetriever; _transformer = transformer; _recentPlayerAchievementsUnlockedRetreiver = recentPlayerAchievementsUnlockedRetreiver; _mapperFactory = mapperFactory; }
/// <summary>
/// Creates the game-definition controller, storing the injected collaborators.
/// Note the parameter/field name mismatch: <paramref name="gameDefinitionCreator"/>
/// is stored in <c>_gameDefinitionSaver</c>.
/// </summary>
public GameDefinitionController(IGameDefinitionRetriever gameDefinitionRetriever,
    ITrendingGamesRetriever trendingGamesRetriever,
    IGameDefinitionDetailsViewModelBuilder gameDefinitionTransformation,
    IGameDefinitionSaver gameDefinitionCreator,
    IBoardGameGeekApiClient boardGameGeekApiClient,
    IUserRetriever userRetriever,
    IBoardGameGeekGamesImporter boardGameGeekGamesImporter,
    ITransformer transformer)
{
    // Plain field capture; assignments are independent of one another.
    _transformer = transformer;
    _boardGameGeekGamesImporter = boardGameGeekGamesImporter;
    _userRetriever = userRetriever;
    _boardGameGeekApiClient = boardGameGeekApiClient;
    _gameDefinitionSaver = gameDefinitionCreator;
    _gameDefinitionTransformation = gameDefinitionTransformation;
    _trendingGamesRetriever = trendingGamesRetriever;
    _gameDefinitionRetriever = gameDefinitionRetriever;
}
// FileSystemWatcher callback: when the watched file changes, drop the cached model and
// reload it so subsequent predictions use the fresh file.
// NOTE(review): clearing then reloading is not synchronized — confirm concurrent callers
// cannot observe trainedModel == null mid-reload.
private static void FileSystemWatcher_Changed(object sender, FileSystemEventArgs e)
{
    Console.WriteLine("New model detected!");
    trainedModel = null;
    LoadModel();
}
// Builds an i.i.d. series with one spike, fits a detector, predicts through a time-series
// engine, checkpoints the engine to disk, reloads the model, and predicts again.
public static void IidSpikeDetectorPrediction()
{
    // Create a new ML context, for ML.NET operations. It can be used for exception tracking and logging,
    // as well as the source of randomness.
    var ml = new MLContext();

    // Generate sample series data with a spike
    const int Size = 10;
    var data = new List<IidSpikeData>(Size);
    for (int i = 0; i < Size / 2; i++)
    {
        data.Add(new IidSpikeData(5));
    }
    // This is a spike
    data.Add(new IidSpikeData(10));
    for (int i = 0; i < Size / 2; i++)
    {
        data.Add(new IidSpikeData(5));
    }

    // Convert data to IDataView.
    var dataView = ml.Data.ReadFromEnumerable(data);

    // Setup IidSpikeDetector arguments
    string outputColumnName = nameof(IidSpikePrediction.Prediction);
    string inputColumnName = nameof(IidSpikeData.Value);

    // The transformed model.
    // NOTE(review): a *spike* detection sample fitting an IidChangePointEstimator looks like a
    // copy/paste slip from the change-point sample — the spike counterpart estimator is the
    // expected API here; confirm against the ML.NET version in use before changing.
    ITransformer model = ml.Transforms.IidChangePointEstimator(outputColumnName, inputColumnName, 95, Size).Fit(dataView);

    // Create a time series prediction engine from the model.
    var engine = model.CreateTimeSeriesPredictionFunction<IidSpikeData, IidSpikePrediction>(ml);
    for (int index = 0; index < 5; index++)
    {
        // Anomaly spike detection.
        var prediction = engine.Predict(new IidSpikeData(5));
        Console.WriteLine("{0}\t{1}\t{2:0.00}\t{3:0.00}", 5, prediction.Prediction[0], prediction.Prediction[1], prediction.Prediction[2]);
    }

    // Spike.
    var spikePrediction = engine.Predict(new IidSpikeData(10));
    Console.WriteLine("{0}\t{1}\t{2:0.00}\t{3:0.00}", 10, spikePrediction.Prediction[0], spikePrediction.Prediction[1], spikePrediction.Prediction[2]);

    // Checkpoint the model.
    var modelPath = "temp.zip";
    engine.CheckPoint(ml, modelPath);

    // Load the model.
    using (var file = File.OpenRead(modelPath))
        model = TransformerChain.LoadFrom(ml, file);

    // NOTE(review): the loop below still predicts through the ORIGINAL engine — the model just
    // loaded from disk is never wrapped in a new prediction engine; confirm this is intentional.
    for (int index = 0; index < 5; index++)
    {
        // Anomaly spike detection.
        var prediction = engine.Predict(new IidSpikeData(5));
        Console.WriteLine("{0}\t{1}\t{2:0.00}\t{3:0.00}", 5, prediction.Prediction[0], prediction.Prediction[1], prediction.Prediction[2]);
    }

    // Expected console output:
    // Data Alert   Score   P-Value
    // 5   0       5.00    0.50
    // 5   0       5.00    0.50
    // 5   0       5.00    0.50
    // 5   0       5.00    0.50
    // 5   0       5.00    0.50
    // 10  1       10.00   0.00   <-- alert is on, predicted spike (check-point model)
    // 5   0       5.00    0.26   <-- load model from disk.
    // 5   0       5.00    0.26
    // 5   0       5.00    0.50
    // 5   0       5.00    0.50
    // 5   0       5.00    0.50
}
/// <summary>
/// Main method used for training the image-classification prediction model: loads images
/// from the assets folder, preprocesses them, splits them into train/validation/test sets,
/// trains a ResNet-v2-101-based classifier, and builds the prediction engine.
/// </summary>
/// <returns>True if successful, otherwise false if some error occurs.</returns>
public bool TrainModel()
{
    try
    {
        ProjectDirectory = Path.GetFullPath(Path.Combine(AppContext.BaseDirectory, "../../../")); //set main directory path
        WorkspaceRelativePath = Path.Combine(ProjectDirectory, "workspace"); // set workspace path
        AssetsRelativePath = Path.Combine(ProjectDirectory, "assets"); //set assets path
        ClearWorkspace();

        MLContext context = new MLContext(); // Machine Learning Context

        IEnumerable<ImageData> images = ImageLoader.LoadImagesFromDirectory(folder: AssetsRelativePath, useFolderNameAsLabel: true); // Load images from assets dir
        IDataView imageData = context.Data.LoadFromEnumerable(images); //fundamental pipeline
        IDataView shuffledData = context.Data.ShuffleRows(imageData); // shuffle rows of the pipeline

        //Creates an Estimator which converts categorical values into numerical keys from InputModel class
        var preprocessingPipeline = context.Transforms.Conversion.MapValueToKey(
            inputColumnName: "Label",
            outputColumnName: "LabelAsKey")
            .Append(context.Transforms.LoadRawImageBytes(
                outputColumnName: "Image",
                imageFolder: AssetsRelativePath,
                inputColumnName: "ImagePath"));

        //Pre-processed data used for training/testing/validating the model
        IDataView preProcessedData = preprocessingPipeline
            .Fit(shuffledData)
            .Transform(shuffledData);

        //Split into 3 categories train / test / validate (70/30, then the 30% is split again)
        TrainTestData trainSplit = context.Data.TrainTestSplit(data: preProcessedData, testFraction: 0.3);
        TrainTestData validationTestSplit = context.Data.TrainTestSplit(trainSplit.TestSet);

        IDataView trainSet = trainSplit.TrainSet; // get the training set
        IDataView validationSet = validationTestSplit.TrainSet; // get the validation set
        // NOTE(review): testSet is computed but never used in this method — confirm whether
        // evaluation on it was intended.
        IDataView testSet = validationTestSplit.TestSet; // get the test set

        //Image trainer options used for training the image classification model
        var classifierOptions = new ImageClassificationTrainer.Options()
        {
            FeatureColumnName = "Image",
            LabelColumnName = "LabelAsKey",
            ValidationSet = validationSet,
            Arch = ImageClassificationTrainer.Architecture.ResnetV2101,
            MetricsCallback = (metrics) => Console.WriteLine(metrics),
            TestOnTrainSet = false,
            Epoch = 100,
            BatchSize = 10,
            ReuseTrainSetBottleneckCachedValues = true,
            ReuseValidationSetBottleneckCachedValues = true,
            WorkspacePath = WorkspaceRelativePath
        };

        //create a training pipeline and append output of prediction as PredictedLabel from OutputModel class
        var trainingPipeline = context.MulticlassClassification.Trainers.ImageClassification(classifierOptions)
            .Append(context.Transforms.Conversion.MapKeyToValue("PredictedLabel"));

        //Main trained model used for making image predictions
        ITransformer trainedModel = trainingPipeline.Fit(trainSet);

        //Prediction engine used for prediction. Created from the trained model
        PredictionEngine = context.Model.CreatePredictionEngine<InputModel, OutputModel>(trainedModel);
        return (true);
    }
    catch (Exception e)
    {
        // NOTE(review): only the message is printed; consider logging the full exception.
        Console.WriteLine(e.Message);
        return (false);
    }
}
/// <summary>
/// Loads a persisted ML.NET model from disk, caches it in <c>_trainedModel</c>, and returns it.
/// </summary>
/// <param name="modelPath">Path of the saved model file.</param>
/// <returns>The loaded model.</returns>
public ITransformer LoadModel(string modelPath)
{
    // The input schema out-parameter is not needed here, so it is discarded.
    _trainedModel = _mlContext.Model.Load(modelPath, out _);
    return _trainedModel;
}
/// <summary>
/// Hook implemented by derived classes to update <paramref name="transformer"/> in the
/// context of the given <paramref name="parent"/> transformer.
/// </summary>
/// <param name="transformer">The transformer being updated.</param>
/// <param name="parent">The parent transformer supplied alongside it.</param>
protected abstract void UpdateTransformer(ITransformer transformer, ITransformer parent);
/// <summary>
/// Creates an analyzer over the given reader and transformer dependencies.
/// </summary>
/// <param name="reader">Reader dependency stored for later use.</param>
/// <param name="transformer">Transformer dependency stored for later use.</param>
public Analyzer(IReader reader, ITransformer transformer)
{
    // Plain field capture; assignment order is irrelevant.
    this.transformer = transformer;
    this.reader = reader;
}
/// <summary>
/// Persists the trained model (with its training schema) to the configured model file,
/// logging a progress message to the shared results list first.
/// </summary>
/// <param name="mlContext">ML.NET context used for saving.</param>
/// <param name="trainingDataViewSchema">Schema of the data the model was trained on.</param>
/// <param name="model">The trained model to persist.</param>
public static void SaveModel(MLContext mlContext, DataViewSchema trainingDataViewSchema, ITransformer model)
{
    results.Add("Saving the model to a file...");
    mlContext.Model.Save(model, trainingDataViewSchema, Config.ModelFile);
}
/// <summary>
/// Trains an MNIST-style multiclass digit classifier from <paramref name="filePathTrain"/>,
/// evaluates it on <paramref name="filePathTest"/>, and saves it to
/// <paramref name="fileMLTrainedModel"/>. Training is skipped when the saved model already
/// exists; any exception is caught and written to the console.
/// </summary>
public static void Train(string filePathTrain, string filePathTest, string fileMLTrainedModel)
{
    try
    {
        // Skip the training of the model if model already exists.
        if (System.IO.File.Exists(fileMLTrainedModel))
        {
            return;
        }

        /**
         * Copied the following code chunk from the official Github of ML.NET
         *
         * https://github.com/dotnet/machinelearning-samples/blob/master/samples/csharp/getting-started/MulticlassClassification_MNIST/MNIST/Program.cs
         */

        // STEP 1: Common data loading configuration (64 pixel columns + label column at index 64)
        var trainData = mlContext.Data.LoadFromTextFile(path: filePathTrain,
            columns: new[]
            {
                new TextLoader.Column(nameof(InputData.PixelValues), DataKind.Single, 0, 63),
                new TextLoader.Column("Number", DataKind.Single, 64)
            },
            hasHeader: false,
            separatorChar: ','
        );

        var testData = mlContext.Data.LoadFromTextFile(path: filePathTest,
            columns: new[]
            {
                new TextLoader.Column(nameof(InputData.PixelValues), DataKind.Single, 0, 63),
                new TextLoader.Column("Number", DataKind.Single, 64)
            },
            hasHeader: false,
            separatorChar: ','
        );

        // STEP 2: Common data process configuration with pipeline data transformations
        // Use in-memory cache for small/medium datasets to lower training time. Do NOT use it (remove .AppendCacheCheckpoint()) when handling very large datasets.
        var dataProcessPipeline = mlContext.Transforms.Conversion.MapValueToKey("Label", "Number", keyOrdinality: ValueToKeyMappingEstimator.KeyOrdinality.ByValue).
            Append(mlContext.Transforms.Concatenate("Features", nameof(InputData.PixelValues)).AppendCacheCheckpoint(mlContext));

        // STEP 3: Set the training algorithm, then create and config the modelBuilder
        var trainer = mlContext.MulticlassClassification.Trainers.SdcaMaximumEntropy(labelColumnName: "Label", featureColumnName: "Features");
        var trainingPipeline = dataProcessPipeline.Append(trainer).Append(mlContext.Transforms.Conversion.MapKeyToValue("Number", "Label"));

        // STEP 4: Train the model fitting to the DataSet
        Console.WriteLine("=============== Training the model ===============");
        ITransformer trainedModel = trainingPipeline.Fit(trainData);

        Console.WriteLine("===== Evaluating Model's accuracy with Test data =====");
        var predictions = trainedModel.Transform(testData);
        var metrics = mlContext.MulticlassClassification.Evaluate(data: predictions, labelColumnName: "Number", scoreColumnName: "Score");
        ///Common.ConsoleHelper.PrintMultiClassClassificationMetrics(trainer.ToString(), metrics);

        mlContext.Model.Save(trainedModel, trainData.Schema, fileMLTrainedModel);
        Console.WriteLine("The model is saved to {0}", fileMLTrainedModel);

        /*
         * End Copy
         */
    }
    catch (Exception e)
    {
        // NOTE(review): only the message is printed; the stack trace is lost.
        Console.WriteLine("ERROR " + e.Message);
    }
}
/// <summary>
/// Save the model to the stream.
/// </summary>
/// <param name="model">The trained model to be saved.</param>
/// <param name="stream">A writeable, seekable stream to save to.</param>
public void Save(ITransformer model, Stream stream)
{
    model.SaveTo(_env, stream);
}
/// <summary>Creates the handler, storing the transformer it delegates to.</summary>
/// <param name="transformer">The wrapped transformer.</param>
public InnerTransformHandler(ITransformer transformer) { _transformer = transformer; }
/// <summary>
/// Wraps a transformer, deriving this instance's name by prefixing the wrapped
/// transformer's name with "Token ".
/// </summary>
/// <param name="transformer">The wrapped transformer.</param>
public Current(ITransformer transformer)
{
    this.transformer = transformer;
    this.name = "Token " + transformer.Name;
}
/// <summary>
/// Creates the builder, storing the transformer and weight-tier calculator dependencies.
/// </summary>
public GameDefinitionSummaryViewModelBuilder(ITransformer transformer, IWeightTierCalculator weightTierCalculator)
{
    // Field capture with a consistent access style (the original mixed "this._x" and "_x").
    _weightTierCalculator = weightTierCalculator;
    _transformer = transformer;
}
/// <summary>
/// Creates a positional wrapper that requires a strictly positive position.
/// </summary>
/// <param name="pos">Position; must be greater than zero.</param>
/// <param name="transformer">The wrapped transformer, forwarded to the base class.</param>
/// <exception cref="ArgumentOutOfRangeException">Thrown when <paramref name="pos"/> is not positive.</exception>
public Next(int pos, ITransformer transformer) : base(pos, transformer)
{
    // NOTE: C# runs the base constructor first, so an invalid pos has already been
    // forwarded to the base class by the time this check throws.
    if (pos <= 0)
        // Include the parameter name and offending value (the original threw a bare
        // ArgumentOutOfRangeException with no diagnostic information).
        throw new ArgumentOutOfRangeException(nameof(pos), pos, "Position must be greater than zero.");
}
/// <summary>Convenience overload: equivalent to <c>Next(1, transformer)</c>.</summary>
/// <param name="transformer">The wrapped transformer.</param>
public Next(ITransformer transformer) : this(1, transformer) { }
// This example creates a time series (list of Data with the i-th element corresponding to the i-th time slot).
// IidChangePointDetector is applied then to identify points where data distribution changed using time series
// prediction engine. The engine is checkpointed and then loaded back from disk into memory and used for prediction.
public static void IidChangePointDetectorPrediction()
{
    // Create a new ML context, for ML.NET operations. It can be used for exception tracking and logging,
    // as well as the source of randomness.
    var ml = new MLContext();

    // Generate sample series data with a change: first half 5s, second half 7s.
    const int Size = 16;
    var data = new List<IidChangePointData>(Size);
    for (int i = 0; i < Size / 2; i++)
    {
        data.Add(new IidChangePointData(5));
    }
    // This is a change point
    for (int i = 0; i < Size / 2; i++)
    {
        data.Add(new IidChangePointData(7));
    }

    // Convert data to IDataView.
    var dataView = ml.Data.ReadFromEnumerable(data);

    // Setup estimator arguments; column names come from the prediction/data classes.
    string outputColumnName = nameof(ChangePointPrediction.Prediction);
    string inputColumnName = nameof(IidChangePointData.Value);

    // Time Series model (confidence 95, change history length Size / 4).
    ITransformer model = ml.Transforms.IidChangePointEstimator(outputColumnName, inputColumnName, 95, Size / 4).Fit(dataView);

    // Create a time series prediction engine from the model.
    var engine = model.CreateTimeSeriesPredictionFunction<IidChangePointData, ChangePointPrediction>(ml);
    for (int index = 0; index < 8; index++)
    {
        // Anomaly change point detection.
        var prediction = engine.Predict(new IidChangePointData(5));
        Console.WriteLine("{0}\t{1}\t{2:0.00}\t{3:0.00}\t{4:0.00}", 5, prediction.Prediction[0], prediction.Prediction[1], prediction.Prediction[2], prediction.Prediction[3]);
    }

    // Change point
    var changePointPrediction = engine.Predict(new IidChangePointData(7));
    Console.WriteLine("{0}\t{1}\t{2:0.00}\t{3:0.00}\t{4:0.00}", 7, changePointPrediction.Prediction[0], changePointPrediction.Prediction[1], changePointPrediction.Prediction[2], changePointPrediction.Prediction[3]);

    // Checkpoint the model.
    var modelPath = "temp.zip";
    engine.CheckPoint(ml, modelPath);

    // Reference to current time series engine because in the next step "engine" will point to the
    // checkpointed model being loaded from disk.
    var timeseries1 = engine;

    // Load the model.
    using (var file = File.OpenRead(modelPath))
        model = TransformerChain.LoadFrom(ml, file);

    // Create a time series prediction engine from the checkpointed model.
    engine = model.CreateTimeSeriesPredictionFunction<IidChangePointData, ChangePointPrediction>(ml);
    for (int index = 0; index < 8; index++)
    {
        // Anomaly change point detection.
        var prediction = engine.Predict(new IidChangePointData(7));
        Console.WriteLine("{0}\t{1}\t{2:0.00}\t{3:0.00}\t{4:0.00}", 7, prediction.Prediction[0], prediction.Prediction[1], prediction.Prediction[2], prediction.Prediction[3]);
    }

    // Prediction from the original time series engine should match the prediction from
    // check pointed model.
    engine = timeseries1;
    for (int index = 0; index < 8; index++)
    {
        // Anomaly change point detection.
        var prediction = engine.Predict(new IidChangePointData(7));
        Console.WriteLine("{0}\t{1}\t{2:0.00}\t{3:0.00}\t{4:0.00}", 7, prediction.Prediction[0], prediction.Prediction[1], prediction.Prediction[2], prediction.Prediction[3]);
    }

    // Expected console output:
    // Data Alert   Score   P-Value Martingale value
    // 5   0       5.00    0.50    0.00       <-- Time Series 1.
    // 5   0       5.00    0.50    0.00
    // 5   0       5.00    0.50    0.00
    // 5   0       5.00    0.50    0.00
    // 5   0       5.00    0.50    0.00
    // 5   0       5.00    0.50    0.00
    // 5   0       5.00    0.50    0.00
    // 5   0       5.00    0.50    0.00
    // 7   1       7.00    0.00    10298.67   <-- alert is on, predicted changepoint (and model is checkpointed).
    // 7   0       7.00    0.13    33950.16   <-- Time Series 2 : Model loaded back from disk and prediction is made.
    // 7   0       7.00    0.26    60866.34
    // 7   0       7.00    0.38    78362.04
    // 7   0       7.00    0.50    0.01
    // 7   0       7.00    0.50    0.00
    // 7   0       7.00    0.50    0.00
    // 7   0       7.00    0.50    0.00
    // 7   0       7.00    0.13    33950.16   <-- Time Series 1 and prediction is made.
    // 7   0       7.00    0.26    60866.34
    // 7   0       7.00    0.38    78362.04
    // 7   0       7.00    0.50    0.01
    // 7   0       7.00    0.50    0.00
    // 7   0       7.00    0.50    0.00
    // 7   0       7.00    0.50    0.00
}
/// <summary>
/// Runs the model for every agent against item ids 1..499999, keeps predictions whose
/// rounded score exceeds 3.5, re-scores them to 5 (site category matches the agent's
/// preference) or 1 (no match), and appends the kept rows to the output CSV.
/// </summary>
/// <param name="mlContext">ML.NET context used to build the prediction engine.</param>
/// <param name="model">The trained recommendation model.</param>
public static void UseModelForSinglePrediction(MLContext mlContext, ITransformer model)
{
    OsExtensions.WriteOver($"Processing predictions, pass {Config.CurrentIteration}");
    var predictionEngine = mlContext.Model.CreatePredictionEngine<BrowseHistory, BrowsePrediction>(model);
    foreach (var agent in Agents)
    {
        var recs = new List<BrowsePrediction>();
        // NOTE(review): 500000 is a magic upper bound on item ids — confirm it tracks the
        // actual site count rather than being scanned unconditionally.
        for (var i = 1; i < 500000; i++)
        {
            var testInput = new BrowseHistory { userId = agent.Id, itemId = i };
            var itemPrediction = predictionEngine.Predict(testInput);
            itemPrediction.Iteration = Config.CurrentIteration;
            itemPrediction.UserId = testInput.userId;
            itemPrediction.ItemId = testInput.itemId;
            // Keep only predictions scoring above the 3.5 threshold (after 1-decimal rounding).
            if (Math.Round(itemPrediction.Score, 1) > 3.5)
            {
                var site = Sites.FirstOrDefault(o => o.Id == itemPrediction.ItemId);
                if (site == null)
                {
                    continue;
                }
                if (agent.Preference == site.Category)
                {
                    // add matching sites with positive correlation
                    itemPrediction.Score = 5;
                    recs.Add(itemPrediction);
                    //results.Add($"Item {testInput.itemId} is recommended for user {testInput.userId} at {Math.Round(itemPrediction.Score, 1)}");
                }
                else
                {
                    // add but rate as poor match
                    itemPrediction.Score = 1;
                    recs.Add(itemPrediction);
                }
            }
        }
        using (StreamWriter w = File.AppendText(Config.OutputFile))
        {
            //var rnd = new Random();
            //var choices = recs.OrderBy(x => rnd.Next()).Take(25);
            var choices = recs;
            foreach (var rec in choices)
            {
                // Seconds since the Unix epoch, used as the timestamp column.
                TimeSpan t = DateTime.UtcNow - new DateTime(1970, 1, 1);
                int secondsSinceEpoch = (int)t.TotalSeconds;
                w.WriteLine($"{rec.UserId},{rec.ItemId},{Math.Round(rec.Score, 1)},{secondsSinceEpoch},{rec.Iteration}"); //user_id,item_id,timestamp
            }
        }
    }
}
/// <summary>
/// Wraps a raw <c>ITable&lt;IData, IData&gt;</c> with typed key/record transformers.
/// When a transformer is omitted, a default <c>DataTransformer</c> is built from the
/// corresponding type in the table's descriptor.
/// </summary>
/// <param name="table">The underlying table; required.</param>
/// <param name="keyTransformer">Optional key transformer; defaulted from the descriptor's key type.</param>
/// <param name="recordTransformer">Optional record transformer; defaulted from the descriptor's record type.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="table"/> is null.</exception>
public XTablePortable(ITable<IData, IData> table, ITransformer<TKey, IData> keyTransformer = null, ITransformer<TRecord, IData> recordTransformer = null)
{
    if (table == null)
    {
        // nameof keeps the reported parameter name correct under rename refactorings
        // (previously a hard-coded "table" string).
        throw new ArgumentNullException(nameof(table));
    }
    Table = table;

    // Fall back to descriptor-driven transformers when none are supplied.
    if (keyTransformer == null)
    {
        keyTransformer = new DataTransformer<TKey>(table.Descriptor.KeyType);
    }
    if (recordTransformer == null)
    {
        recordTransformer = new DataTransformer<TRecord>(table.Descriptor.RecordType);
    }

    KeyTransformer = keyTransformer;
    RecordTransformer = recordTransformer;
}
/// <summary>Creates the builder, storing its collaborating builder and transformer.</summary>
public GameDefinitionDetailsViewModelBuilder(IPlayedGameDetailsViewModelBuilder playedGameDetailsViewModelBuilder, ITransformer transformer) { _playedGameDetailsViewModelBuilder = playedGameDetailsViewModelBuilder; _transformer = transformer; }
/// <summary>Composes a transformer with a parser, storing both for later use.</summary>
/// <param name="transformer">First stage, producing an R1 from an S.</param>
/// <param name="parser">Second stage, producing an R2.</param>
public TransformerThenParser(ITransformer <S, R1> transformer, IParser <R2> parser) { this.Transformer = transformer; this.Parser = parser; }
/// <summary>
/// Creates a backward positional wrapper: the position is normalized to a negative
/// offset via <c>-Math.Abs(pos)</c> before being passed to the base class.
/// </summary>
/// <param name="pos">Distance; must be non-zero (sign is ignored).</param>
/// <param name="transformer">The wrapped transformer, forwarded to the base class.</param>
/// <exception cref="ArgumentOutOfRangeException">Thrown when <paramref name="pos"/> is zero.</exception>
public Prev(int pos, ITransformer transformer) : base(-Math.Abs(pos), transformer)
{
    // NOTE: C# runs the base constructor first, so pos == 0 has already been forwarded
    // to the base class by the time this check throws.
    if (pos == 0)
        // Include the parameter name and offending value (the original threw a bare
        // ArgumentOutOfRangeException with no diagnostic information).
        throw new ArgumentOutOfRangeException(nameof(pos), pos, "Position must be non-zero.");
}
/// <summary>Composes a transformer with a consumer, storing both for later use.</summary>
/// <param name="transformer">Stage producing an R from an S.</param>
/// <param name="consumer">Stage consuming the result.</param>
public TransformerThenConsumer(ITransformer <S, R> transformer, IConsumer consumer) { this.Transformer = transformer; this.Consumer = consumer; }
/// <summary>
/// Scores the data view with the given model by delegating to
/// <c>PredictDataUsingModel</c> (note the swapped argument order in the delegate call).
/// </summary>
/// <param name="model">The trained model.</param>
/// <param name="data">The data to score.</param>
/// <returns>The sequence of prediction vectors produced by the helper.</returns>
public IEnumerable<float[]> Score(ITransformer model, IDataView data) => PredictDataUsingModel(data, model);
/// <summary>Composes two transformers (S -> I, then I -> R), storing both for later use.</summary>
/// <param name="firstTransformer">First stage, producing the intermediate I.</param>
/// <param name="secondTransformer">Second stage, producing the final R.</param>
public TransformerThenTransformer(ITransformer <S, I> firstTransformer, ITransformer <I, R> secondTransformer) { this.FirstTransformer = firstTransformer; this.SecondTransformer = secondTransformer; }
/// <summary>
/// Translates the asset by delegating to the shared helper with the LESS translator.
/// </summary>
/// <param name="asset">Asset to translate.</param>
/// <param name="transformer">Transformer applied during translation.</param>
/// <param name="isDebugMode">Flag that web application is in debug mode.</param>
/// <returns>The translated asset.</returns>
protected override IAsset TranslateAssetCore(IAsset asset, ITransformer transformer, bool isDebugMode)
{
    IAsset translated = InnerTranslateAsset<LessTranslator>("LessTranslator", asset, transformer, isDebugMode);
    return translated;
}
/// <summary>Assigns the transformer this rule resolves to.</summary>
/// <param name="transformer">The transformer stored into <c>Transformer.Value</c>.</param>
public void Is(ITransformer<S, R> transformer) => this.Transformer.Value = transformer;
/// <summary>
/// Entry point: loads the Iris dataset, trains a K-Means clustering model (k = 3),
/// evaluates it, persists it to disk, then reloads it and predicts the cluster of a
/// single sample.
/// </summary>
private static void Main(string[] args)
{
    //Create the MLContext to share across components for deterministic results
    MLContext mlContext = new MLContext(seed: 1); //Seed set to any number so you have a deterministic environment

    // STEP 1: Common data loading configuration
    IDataView fullData = mlContext.Data.LoadFromTextFile(path: DataPath,
        columns: new[]
        {
            new TextLoader.Column("Label", DataKind.Single, 0),
            new TextLoader.Column(nameof(IrisData.SepalLength), DataKind.Single, 1),
            new TextLoader.Column(nameof(IrisData.SepalWidth), DataKind.Single, 2),
            new TextLoader.Column(nameof(IrisData.PetalLength), DataKind.Single, 3),
            new TextLoader.Column(nameof(IrisData.PetalWidth), DataKind.Single, 4),
        },
        hasHeader: true,
        separatorChar: '\t');

    //Split dataset in two parts: TrainingDataset (80%) and TestDataset (20%)
    DataOperationsCatalog.TrainTestData trainTestData = mlContext.Data.TrainTestSplit(fullData, testFraction: 0.2);
    trainingDataView = trainTestData.TrainSet;
    testingDataView = trainTestData.TestSet;

    //STEP 2: Process data transformations in pipeline
    var dataProcessPipeline = mlContext.Transforms.Concatenate("Features", nameof(IrisData.SepalLength), nameof(IrisData.SepalWidth), nameof(IrisData.PetalLength), nameof(IrisData.PetalWidth));

    // (Optional) Peek data in training DataView after applying the ProcessPipeline's transformations
    Common.ConsoleHelper.PeekDataViewInConsole(mlContext, trainingDataView, dataProcessPipeline, 10);
    Common.ConsoleHelper.PeekVectorColumnDataInConsole(mlContext, "Features", trainingDataView, dataProcessPipeline, 10);

    // STEP 3: Create and train the model
    var trainer = mlContext.Clustering.Trainers.KMeans(featureColumnName: "Features", numberOfClusters: 3);
    var trainingPipeline = dataProcessPipeline.Append(trainer);
    var trainedModel = trainingPipeline.Fit(trainingDataView);

    // STEP4: Evaluate accuracy of the model
    IDataView predictions = trainedModel.Transform(testingDataView);
    var metrics = mlContext.Clustering.Evaluate(predictions, scoreColumnName: "Score", featureColumnName: "Features");
    ConsoleHelper.PrintClusteringMetrics(trainer.ToString(), metrics);

    // STEP5: Save/persist the model as a .ZIP file
    // NOTE(review): FileShare.Write while writing the model is unusual — FileShare.None is
    // the safer choice; confirm nothing depends on concurrent writers.
    using (var fs = new FileStream(ModelPath, FileMode.Create, FileAccess.Write, FileShare.Write))
        mlContext.Model.Save(trainedModel, trainingDataView.Schema, fs);

    Console.WriteLine("=============== End of training process ===============");

    Console.WriteLine("=============== Predict a cluster for a single case (Single Iris data sample) ===============");

    // Test with one sample text
    // NOTE(review): these values look shuffled relative to the field names (a 5.1 petal
    // width is outside any real iris range; typical setosa is sepal 5.1/3.5, petal 1.4/0.2)
    // — confirm the intended sample.
    var sampleIrisData = new IrisData()
    {
        SepalLength = 3.3f,
        SepalWidth = 1.6f,
        PetalLength = 0.2f,
        PetalWidth = 5.1f,
    };

    using (var stream = new FileStream(ModelPath, FileMode.Open, FileAccess.Read, FileShare.Read))
    {
        ITransformer model = mlContext.Model.Load(stream, out var modelInputSchema);
        // Create prediction engine related to the loaded trained model
        var predEngine = mlContext.Model.CreatePredictionEngine<IrisData, IrisPrediction>(model);

        //Score
        var resultprediction = predEngine.Predict(sampleIrisData);

        Console.WriteLine($"Cluster assigned for setosa flowers:" + resultprediction.SelectedClusterId);
    }

    Console.WriteLine("=============== End of process, hit any key to finish ===============");
    Console.ReadKey();
}
/// <summary>Wraps a same-typed (R -> R) transformer whose application is optional.</summary>
/// <param name="transformer">The wrapped transformer.</param>
public OptionalTransformer(ITransformer <R, R> transformer) { this.Transformer = transformer; }
// Dead code kept for reference — restore if the probability threshold is needed again.
//public double getProbabilityThreshold()
//{
//    return lrModel.getThreshold();
//}

/// <summary>Stores the logistic-regression model for later use by this class.</summary>
/// <param name="lrm">The trained model to keep in <c>lrModel</c>.</param>
public void setLogisticRegressionModel(ITransformer lrm)
{
    lrModel = lrm;
}
/// <summary>
/// Builds an <c>XmlSerializerTransformer</c> bound to the "ApplConfig1" section,
/// initializes it against a runtime configuration view, and stores it in the
/// fixture's <c>transformer</c> field.
/// </summary>
public void CreateTransformer()
{
    XmlSerializerTransformer xmlTransformer = new XmlSerializerTransformer();

    // just a fake out to test the transformer
    RuntimeConfigurationView configurationView = new RuntimeConfigurationView(Context);

    // Section name must be set before Initialize so the transformer reads the right section.
    xmlTransformer.CurrentSectionName = "ApplConfig1";
    xmlTransformer.Initialize(configurationView);
    transformer = xmlTransformer;
}
// Rebuilds the static ML context (fixed seed 1), loads the persisted sentiment model from
// modelFileName, and recreates the shared prediction engine from it.
private static void LoadModel()
{
    mlContext = new MLContext(1);
    trainedModel = mlContext.Model.Load(modelFileName, out var modelInputSchema);
    predEngine = mlContext.Model.CreatePredictionEngine <SentimentIssue, SentimentPrediction>(trainedModel);
}
/// <summary>Creates the repository, storing the model-to-entity transformer.</summary>
/// <param name="transformer">Maps <c>EmployeeModel</c> instances to <c>Employee</c> entities.</param>
public EmployeeRepository(ITransformer<EmployeeModel, Employee> transformer) { this.transformer = transformer; }
/// <summary>
/// Evaluates the model on the test data, logs RMSE and R-squared to the shared results
/// list, and appends a full regression-metrics row (for the current iteration) to the
/// stats file.
/// </summary>
/// <param name="mlContext">ML.NET context used for evaluation.</param>
/// <param name="testDataView">Held-out test data.</param>
/// <param name="model">The trained model to evaluate.</param>
public static void EvaluateModel(MLContext mlContext, IDataView testDataView, ITransformer model)
{
    using (StreamWriter w = File.AppendText(Config.StatsFile))
    {
        results.Add("Evaluating model...");
        var prediction = model.Transform(testDataView);
        var metrics = mlContext.Regression.Evaluate(prediction, labelColumnName: "Label", scoreColumnName: "Score");
        results.Add("Root Mean Squared Error : " + metrics.RootMeanSquaredError.ToString());
        results.Add("RSquared: " + metrics.RSquared.ToString());
        w.WriteLine($"{Config.CurrentIteration},{metrics.RootMeanSquaredError},{metrics.RSquared},{metrics.LossFunction},{metrics.MeanAbsoluteError},{metrics.MeanSquaredError}");
    }
}
/// <summary>Creates the multi-transformer, storing the transformer it delegates to.</summary>
/// <param name="delegateTransformer">The wrapped transformer.</param>
public HtmlAwareMultiTransformer(ITransformer delegateTransformer) { this.delegateTransformer = delegateTransformer; }
/// <summary>
/// End-to-end experiment driver: prepares directories and input files, extracts a test
/// split, trains/evaluates/saves a recommendation model per browse type ("pref"/"rand"),
/// replays the saved model over the configured iterations, and returns the collected
/// recommendations plus the job log.
/// </summary>
/// <param name="config">Optional configuration; a default one is created when null.</param>
/// <returns>The recommendations read back from the result file, plus the job output log.</returns>
public static BrowseRecommendationsResults Run(Configuration config = null)
{
    if (config == null)
    {
        Config = new Configuration();
    }
    else
    {
        Config = config;
    }
    var stopwatch = System.Diagnostics.Stopwatch.StartNew();
    results.Add($"Building test {Config.TestNumber} directories...");
    if (!Directory.Exists(Configuration.BaseDirectory))
    {
        Directory.CreateDirectory(Configuration.BaseDirectory);
    }
    if (!Directory.Exists($"{Configuration.BaseDirectory}/{Config.TestNumber}"))
    {
        Directory.CreateDirectory($"{Configuration.BaseDirectory}/{Config.TestNumber}");
    }
    if (!Directory.Exists($"{Configuration.BaseDirectory}/dependencies"))
    {
        Directory.CreateDirectory($"{Configuration.BaseDirectory}/dependencies");
    }
    if (!File.Exists(Config.AgentsFile))
    {
        results.Add("Generating agents file...");
        Generators.GenerateAgentsFile(Config);
    }
    if (!File.Exists(Config.SitesFile))
    {
        results.Add("Generating sites file...");
        Generators.GenerateSitesFile(Config);
    }
    if (!File.Exists(Config.InputFilePref) || !File.Exists(Config.InputFileRand))
    {
        results.Add("Generating browse history files...");
        Generators.GenerateNewBrowseFiles(Config);
    }
    var typesToProcess = new[] { "pref", "rand" };
    foreach (var typeToProcess in typesToProcess)
    {
        Config.CurrentType = typeToProcess;
        results.Add($"Initializing {Config.CurrentType}...");
        results.Add("Extracting test file...");
        if (!File.Exists(Config.TestFile))
        {
            //build test file from input
            var lines = File.ReadAllLines(Config.InputFile);
            var numberForTest = (lines.Length * Config.PercentOfDataIsTest);
            var linesToRemove = new List <int>();
            using (StreamWriter w = File.AppendText(Config.TestFile))
            {
                w.WriteLine("user_id,item_id,rating,timestamp,iteration".ToLower());
                int recordsCopied = 0;
                while (recordsCopied < numberForTest)
                {
                    // NOTE(review): a new Random is constructed on every pass through this loop —
                    // hoist one instance above the while; time-seeded Randoms created in a tight
                    // loop can repeat values. The Contains scan on a List is also O(n) per draw;
                    // a HashSet would avoid quadratic behavior for large test fractions.
                    var r = new Random();
                    var randomLineNumber = r.Next(1, lines.Length - 1);
                    while (linesToRemove.Contains(randomLineNumber))
                    {
                        randomLineNumber = r.Next(1, lines.Length - 1);
                    }
                    var line = lines[randomLineNumber];
                    w.WriteLine(line);
                    linesToRemove.Add(randomLineNumber);
                    recordsCopied++;
                }
            }
            //remove test data from input file
            if (File.Exists(Config.InputFile + ".backup"))
            {
                File.Delete(Config.InputFile + ".backup");
            }
            File.Move(Config.InputFile, Config.InputFile + ".backup");
            using (StreamWriter w = File.AppendText(Config.InputFile))
            {
                w.WriteLine("user_id,item_id,rating,timestamp,iteration".ToLower());
                int i = -1;
                foreach (var line in lines)
                {
                    i++;
                    // Skip the original header row and every line moved to the test file.
                    if (i == 0 || linesToRemove.Contains(i))
                    {
                        continue;
                    }
                    w.WriteLine(line);
                }
            }
        }
        MLContext mlContext = new MLContext();
        (IDataView trainingDataView, IDataView testDataView) = LoadData(mlContext);
        Agents = new List <Agent>();
        using (var fileStream = File.OpenRead(Config.AgentsFile))
        {
            using (var streamReader = new StreamReader(fileStream, Encoding.UTF8, true, 128))
            {
                var i = -1;
                String line;
                while ((line = streamReader.ReadLine()) != null)
                {
                    i++;
                    // Skip the header row.
                    if (i == 0)
                    {
                        continue;
                    }
                    var o = line.Split(Convert.ToChar(","));
                    Agents.Add(new Agent(Convert.ToInt32(o[0]), o[1], Convert.ToInt32(Convert.ToDouble(o[2]))));
                }
            }
        }
        Sites = new List <Site>();
        using (var fileStream = File.OpenRead(Config.SitesFile))
        {
            using (var streamReader = new StreamReader(fileStream, Encoding.UTF8, true, 128))
            {
                String line;
                while ((line = streamReader.ReadLine()) != null)
                {
                    var o = line.Split(Convert.ToChar(","));
                    try
                    {
                        Sites.Add(new Site(Convert.ToInt32(o[0]), o[1]));
                    }
                    // NOTE(review): swallowing all exceptions hides malformed rows silently.
                    catch { } //lazy, don't @ me
                }
            }
        }
        results.Add($"Initializing model and associated requirements...");
        if (!File.Exists(Config.ModelFile))
        {
            ITransformer model = BuildAndTrainModel(mlContext, trainingDataView);
            EvaluateModel(mlContext, testDataView, model);
            UseModelForSinglePrediction(mlContext, model);
            SaveModel(mlContext, trainingDataView.Schema, model);
        }
        /*
         * results.Add("=============== Running Experiment ===============");
         * var experimentSettings = new RecommendationExperimentSettings();
         * experimentSettings.MaxExperimentTimeInSeconds = 3600;
         * experimentSettings.OptimizingMetric = RegressionMetric.MeanSquaredError;
         * var experiment = mlContext.Auto().CreateRecommendationExperiment(experimentSettings);
         * ExperimentResult<RegressionMetrics> experimentResult = mlContext.Auto()
         *     .CreateRecommendationExperiment(new RecommendationExperimentSettings() { MaxExperimentTimeInSeconds = 3600 })
         *     .Execute(trainingDataView, testDataView,
         *         new ColumnInformation()
         *         {
         *             LabelColumnName = "Label",
         *             UserIdColumnName = "userId",
         *             ItemIdColumnName = "itemId"
         *         });
         * // STEP 3: Print metric from best model
         * RunDetail<RegressionMetrics> bestRun = experimentResult.BestRun;
         * results.Add($"Total models produced: {experimentResult.RunDetails.Count()}");
         * results.Add($"Best model's trainer: {bestRun.TrainerName}");
         * results.Add($"Metrics of best model from validation data --");
         * PrintMetrics(bestRun.ValidationMetrics);
         * Environment.Exit(1);
         */
        //now that we have a model, we'll loop through that model x times - same model, growing dataset over iteration
        for (var i = 1; i < Config.Iterations; i++)
        {
            Config.CurrentIteration = i;
            //Define DataViewSchema for data preparation pipeline and trained model
            DataViewSchema modelSchema;
            // Load trained model
            var trainedModel = mlContext.Model.Load(Config.ModelFile, out modelSchema);
            // Load data preparation pipeline and trained model
            UseModelForSinglePrediction(mlContext, trainedModel);
        }
        results.Add("Generating final reports...");
        Generators.GenerateReportFile(Config);
        results.Add($"{Config.CurrentType} completed in {stopwatch.ElapsedMilliseconds} ms");
    }
    stopwatch.Stop();
    results.Add($"Test completed in {stopwatch.ElapsedMilliseconds} ms");
    //load result file
    // NOTE(review): this dereferences the 'config' PARAMETER, which is null whenever the
    // default-configuration branch at the top ran — it should read Config.ResultFileOut;
    // confirm and fix (NullReferenceException when called with no argument).
    var recommendations = RecommendationsService.Load(config.ResultFileOut);
    var browseRecommendationsResults = new BrowseRecommendationsResults
    {
        JobOutput = results,
        Recommendations = recommendations
    };
    return (browseRecommendationsResults);
}
/// <summary>
/// Predicts the salary for a single employee using the supplied trained model.
/// </summary>
/// <param name="trainedModel">Trained ML.NET model to build the prediction engine from.</param>
/// <param name="employee">Employee whose salary is predicted.</param>
/// <returns>The predicted salary.</returns>
public static float GetPrediction(ITransformer trainedModel, Employee employee)
{
    // Build an engine for this model, then delegate to the engine-based overload.
    var engine = MlContext.Model.CreatePredictionEngine<EmployeeDto, SalaryPrediction>(trainedModel);

    return GetPrediction(engine, employee);
}
/// <summary>
/// Compiles a user-supplied C# script into an in-memory <c>ITransformer</c> that
/// operates on a <c>Row</c>, then smoke-tests the compiled transform against a
/// synthetic row populated with default values.
/// </summary>
/// <param name="outKey">Name of the output field the transform writes.</param>
/// <param name="outType">Simple type name of the output field.</param>
/// <param name="script">The user's C# script body (expected to return a value).</param>
/// <param name="scripts">Shared helper scripts prepended to the generated source.</param>
/// <param name="parameters">Input parameters surfaced to the script as typed locals.</param>
public CSharpOperation(string outKey, string outType, string script, Dictionary<string, Script> scripts, IParameters parameters)
    : base(string.Empty, outKey) {
    var csc = new CSharpCodeProvider();
    var ca = Assembly.GetExecutingAssembly();
    var cp = new CompilerParameters { GenerateInMemory = true };
    var testRow = new Row();

    cp.ReferencedAssemblies.Add("System.dll");
    cp.ReferencedAssemblies.Add("System.Core.dll");
    cp.ReferencedAssemblies.Add("mscorlib.dll");
    // Reference this assembly so the generated code can see ITransformer and Row.
    cp.ReferencedAssemblies.Add(ca.Location);

    // Prepend every shared script so the user's code can call into them.
    var scriptBuilder = new StringBuilder(string.Empty);
    foreach (var s in scripts) {
        scriptBuilder.AppendLine($"// {s.Value.Name} script");
        scriptBuilder.AppendLine(s.Value.Content);
    }

    // Build typed local declarations for the generated Transform method, and seed
    // testRow with matching default values so the compiled code can be smoke-tested.
    var castBuilder = new StringBuilder(string.Empty);
    if (!parameters.Any()) {
        castBuilder.AppendLine(string.Format("{1} {0} = ({1}) row[\"{0}\"];", OutKey, Common.ToSystemType(outType)));
        testRow[OutKey] = new DefaultFactory(Logger).Convert(null, outType);
    } else {
        var map = Common.GetLiteral();
        foreach (var pair in parameters) {
            if (pair.Value.HasValue()) {
                // Literal parameter: inline its value as a typed constant.
                castBuilder.AppendLine($"{Common.ToSystemType(pair.Value.SimpleType)} {pair.Value.Name} = {map[pair.Value.SimpleType](pair.Value.Value)};");
            } else {
                // Field parameter: read it out of the row at runtime.
                castBuilder.AppendLine(string.Format("{1} {0} = ({1}) row[\"{0}\"];", pair.Value.Name, Common.ToSystemType(pair.Value.SimpleType)));
            }
            testRow[pair.Value.Name] = new DefaultFactory(Logger).Convert(null, pair.Value.SimpleType);
        }
    }

    var code = $@"using System;
using System.Text;
using System.Collections.Generic;
using System.Linq;
using Transformalize.Operations.Transform;
using Transformalize.Libs.Rhino.Etl;
{scriptBuilder}
public class Transformer : ITransformer {{
    public object Transform(Row row) {{
        {castBuilder}
        //User's script
        {script}
    }}
}}";

    Logger.EntityDebug(EntityName, "Compiling this code:");
    Logger.EntityDebug(EntityName, code);

    var res = csc.CompileAssemblyFromSource(cp, code);

    // FIX: res.Errors also contains *warnings*; the old `res.Errors.Count == 0`
    // check treated any warning as a failed compile. HasErrors is true only for
    // genuine errors (CompilerResults documentation).
    if (!res.Errors.HasErrors) {
        var type = res.CompiledAssembly.GetType("Transformer");
        _transformer = (ITransformer)Activator.CreateInstance(type);
        try {
            var test = _transformer.Transform(testRow);
            Logger.EntityDebug(EntityName, "CSharp transform compiled and passed test. {0}", test);
        } catch (Exception e) {
            // Best-effort smoke test: a failure is logged but does not abort setup,
            // since default values may legitimately not satisfy the user's script.
            Logger.EntityDebug(EntityName, "CSharp transform compiled but failed test. {0}", e.Message);
            Logger.EntityDebug(EntityName, e.StackTrace);
        }
    } else {
        // Log each diagnostic (errors, and warnings if present) before failing.
        foreach (var error in res.Errors) {
            Logger.EntityError(EntityName, error.ToString());
        }
        throw new TransformalizeException(Logger, EntityName, "Failed to compile code. {0}", code);
    }

    Name = $"CSharpOperation ({outKey})";
}
/// <summary>
/// Predicts salaries for a sequence of employees, pairing each employee with
/// its predicted value. Enumeration is deferred; predictions run lazily.
/// </summary>
/// <param name="trainedModel">Trained ML.NET model to build the prediction engine from.</param>
/// <param name="employees">Employees to score.</param>
/// <returns>A lazy sequence of (employee, predicted salary) pairs.</returns>
public static IEnumerable<KeyValuePair<Employee, float>> GetPrediction(ITransformer trainedModel, IEnumerable<Employee> employees)
{
    // One engine is reused across the whole sequence.
    var engine = MlContext.Model.CreatePredictionEngine<EmployeeDto, SalaryPrediction>(trainedModel);

    return employees.Select(employee => new KeyValuePair<Employee, float>(employee, GetPrediction(engine, employee)));
}
/// <summary>
/// Creates the controller with its player-retrieval and transformation dependencies.
/// </summary>
/// <param name="playerRetriever">Service used to look up player data.</param>
/// <param name="transformer">Transformer applied to the retrieved data.</param>
public PlayerStatsController(IPlayerRetriever playerRetriever, ITransformer transformer)
{
    this.transformer = transformer;
    this.playerRetriever = playerRetriever;
}
// Initializes the index to -1, i.e. "before the first element"; presumably idx is
// advanced as items are inspected — confirm against the base transducer's stepping.
internal XFindIndex(DynamicDelegate f, ITransformer xf) : base(f, xf) { idx = -1; }
/// <summary>
/// Creates a combinator that first parses with <paramref name="parser"/> and then
/// transforms the parse result with <paramref name="transformer"/>.
/// </summary>
/// <param name="parser">Produces the intermediate <typeparamref name="R1"/> result.</param>
/// <param name="transformer">Maps the intermediate result to <typeparamref name="R2"/>.</param>
public ParserThenTransformer(IParser<R1> parser, ITransformer<R1, R2> transformer)
{
    this.Transformer = transformer;
    this.Parser = parser;
}
/// <summary>
/// Convenience overload: delegates to the main constructor with a step of 1.
/// </summary>
/// <param name="transformer">Transformer to apply.</param>
public Prev(ITransformer transformer) : this(1, transformer) { }
/// <summary>
/// Register new <see cref="ITransformer"/>, keyed by its <c>Name</c>.
/// </summary>
/// <param name="transformer"><see cref="ITransformer"/> instance.</param>
public void RegisterTransformer(ITransformer transformer)
{
    // NOTE(review): transformers appears to be a dictionary — Add will throw on a
    // duplicate name rather than replace the existing registration; confirm intended.
    this.transformers.Add(transformer.Name, transformer);
}
/// <summary>
/// Creates the combinator, storing the inner transformer in <c>Transformer</c>.
/// </summary>
/// <param name="transformer">Transformer to apply (per the type's one-or-more semantics).</param>
public OneOrMoreTransformer(ITransformer <R, R> transformer) { this.Transformer = transformer; }
/// <summary>
/// Creates the plugin with its farm-service and application-path dependencies.
/// </summary>
/// <param name="farmService">Farm service used to query project data.</param>
/// <param name="pathProvider">Resolves physical application paths.</param>
public ModificationHistoryProjectPlugin(IFarmService farmService, IPhysicalApplicationPathProvider pathProvider)
{
    this.pathProvider = pathProvider;
    this.farmService = farmService;

    // The XSL transformer is not injected; each plugin instance owns its own.
    transformer = new XslTransformer();
}
/// <summary>
/// Validates that <paramref name="transformer"/> is non-null and is a row-to-row
/// mapper, then returns its <c>GetRowToRowMapper</c> factory as a delegate.
/// </summary>
/// <param name="ectx">Exception context used to raise validation failures.</param>
/// <param name="transformer">Transformer to validate.</param>
/// <returns>A delegate that produces an <c>IRowToRowMapper</c> for a given schema.</returns>
private static Func <Schema, IRowToRowMapper> TransformerChecker(IExceptionContext ectx, ITransformer transformer)
{
    // Null check must precede the IsRowToRowMapper access below.
    ectx.CheckValue(transformer, nameof(transformer));
    ectx.CheckParam(transformer.IsRowToRowMapper, nameof(transformer), "Must be a row to row mapper");
    // Method-group conversion: the returned delegate is bound to this transformer instance.
    return(transformer.GetRowToRowMapper);
}
/// <summary>
/// Forwards the call to the transformer, passing this instance (visitor-style dispatch).
/// </summary>
/// <param name="transformer">Transformer that processes this node.</param>
/// <returns>The transformer's result for this instance.</returns>
public override bool Transform(ITransformer transformer) => transformer.Transform(this);
/// <summary>
/// Method transforms TSource type collection to TResult type collection.
/// </summary>
/// <typeparam name="TSource">
/// Type of collection.
/// </typeparam>
/// <typeparam name="TResult">
/// Type of transformed collection.
/// </typeparam>
/// <param name="collection">
/// Collection to transform.
/// </param>
/// <param name="transformer">
/// Format to transform.
/// </param>
/// <returns>
/// Returns an IEnumerator for the transformed collection.
/// </returns>
/// <exception cref="ArgumentNullException">
/// Collection must not be null. Transformer must not be null.
/// </exception>
public static IEnumerable <TResult> Transform <TSource, TResult>(this IEnumerable <TSource> collection, ITransformer <TSource, TResult> transformer)
{
    // FIX: the doc promises ArgumentNullException, but a null transformer previously
    // surfaced as a NullReferenceException from the transformer.Transform method-group
    // conversion. Guard eagerly, before any deferred enumeration begins.
    if (collection == null)
    {
        throw new ArgumentNullException(nameof(collection));
    }

    if (transformer == null)
    {
        throw new ArgumentNullException(nameof(transformer));
    }

    return(collection.Transform(transformer.Transform));
}