public static ModelOperations.PredictorModelOutput CombineOvaModels(IHostEnvironment env, ModelOperations.CombineOvaPredictorModelsInput input)
{
    // Folds the binary predictors of an array of trained models into a single
    // one-vs-all (OVA) multiclass predictor model over the supplied training data.
    Contracts.CheckValue(env, nameof(env));
    var host = env.Register("CombineOvaModels");
    host.CheckValue(input, nameof(input));
    EntryPointUtils.CheckInputArgs(host, input);
    host.CheckNonEmpty(input.ModelArray, nameof(input.ModelArray));

    // Normalization arguably belongs in macro expansion, but since we receive a
    // subgraph rather than a learner it is hard to decide here whether a
    // normalization node is needed; everywhere else in the codebase that
    // responsibility is left to TransformModel, so we follow suit and apply it.
    var transformedData = input.ModelArray[0].TransformModel.Apply(host, input.TrainingData);

    using (var ch = host.Start("CombineOvaModels"))
    {
        ISchema schema = transformedData.Schema;

        // Resolve the role columns, falling back to the conventional default
        // names when the caller did not specify them explicitly.
        var labelColumn = TrainUtils.MatchNameOrDefaultOrNull(ch, schema, nameof(input.LabelColumn), input.LabelColumn, DefaultColumnNames.Label);
        var featureColumn = TrainUtils.MatchNameOrDefaultOrNull(ch, schema, nameof(input.FeatureColumn), input.FeatureColumn, DefaultColumnNames.Features);
        var weightColumn = TrainUtils.MatchNameOrDefaultOrNull(ch, schema, nameof(input.WeightColumn), input.WeightColumn, DefaultColumnNames.Weight);
        var roleMappedData = new RoleMappedData(transformedData, labelColumn, featureColumn, null, weightColumn);

        // Collect the per-class binary predictors and combine them into one OVA predictor.
        var binaryPredictors = input.ModelArray.Select(p => p.Predictor as IPredictorProducing<float>).ToArray();
        var combinedPredictor = Create(host, input.UseProbabilities, binaryPredictors);

        return new ModelOperations.PredictorModelOutput
        {
            PredictorModel = new PredictorModel(env, roleMappedData, input.TrainingData, combinedPredictor)
        };
    }
}
public static ModelOperations.PredictorModelOutput CombineOvaModels(IHostEnvironment env, ModelOperations.CombineOvaPredictorModelsInput input)
{
    // Folds the binary predictors of an array of trained models into a single
    // one-vs-all (OVA) multiclass predictor model over the supplied training data.
    Contracts.CheckValue(env, nameof(env));
    var host = env.Register("CombineOvaModels");
    host.CheckValue(input, nameof(input));
    EntryPointUtils.CheckInputArgs(host, input);
    host.CheckNonEmpty(input.ModelArray, nameof(input.ModelArray));

    // BUG FIX: previously the schema and examples were taken directly from
    // input.TrainingData, ignoring the transform model (e.g. normalization)
    // baked into the trained models. Apply the first model's TransformModel so
    // column matching and the created examples see the same transformed schema
    // the predictors were trained against. Whether normalization should instead
    // happen during macro expansion is an open question; everywhere else in the
    // codebase that responsibility is left to TransformModel.
    var normalizedView = input.ModelArray[0].TransformModel.Apply(host, input.TrainingData);

    using (var ch = host.Start("CombineOvaModels"))
    {
        ISchema schema = normalizedView.Schema;

        // Resolve the role columns, falling back to the conventional default
        // names when the caller did not specify them explicitly.
        var label = TrainUtils.MatchNameOrDefaultOrNull(ch, schema, nameof(input.LabelColumn), input.LabelColumn, DefaultColumnNames.Label);
        var feature = TrainUtils.MatchNameOrDefaultOrNull(ch, schema, nameof(input.FeatureColumn), input.FeatureColumn, DefaultColumnNames.Features);
        var weight = TrainUtils.MatchNameOrDefaultOrNull(ch, schema, nameof(input.WeightColumn), input.WeightColumn, DefaultColumnNames.Weight);
        var data = TrainUtils.CreateExamples(normalizedView, label, feature, null, weight);

        return new ModelOperations.PredictorModelOutput
        {
            PredictorModel = new PredictorModel(env, data, input.TrainingData, Create(host, input.UseProbabilities, input.ModelArray.Select(p => p.Predictor as IPredictorProducing<float>).ToArray()))
        };
    }
}