internal override RoleMappedSchema GetTrainingSchema(IHostEnvironment env)
{
    Contracts.CheckValue(env, nameof(env));
    // Reconstruct the training-time schema: apply the transform model to an empty view
    // of its input schema and attach the role mappings.
    var predInput = TransformModel.Apply(env, new EmptyDataView(env, TransformModel.InputSchema));
    var trainRms = new RoleMappedSchema(predInput.Schema, _roleMappings, opt: true);
    return trainRms;
}
internal override PredictorModel Apply(IHostEnvironment env, TransformModel transformModel)
{
    Contracts.CheckValue(env, nameof(env));
    env.CheckValue(transformModel, nameof(transformModel));
    // Compose the given transform model with this model's transforms and return a new
    // predictor model with the same predictor and role mappings.
    TransformModel newTransformModel = TransformModel.Apply(env, transformModel);
    Contracts.AssertValue(newTransformModel);
    return new PredictorModelImpl(newTransformModel, Predictor, _roleMappings);
}
private PredictorModelImpl(TransformModel transformModel, IPredictor predictor,
    KeyValuePair<RoleMappedSchema.ColumnRole, string>[] roleMappings)
{
    Contracts.AssertValue(transformModel);
    Contracts.AssertValue(predictor);
    Contracts.AssertValue(roleMappings);
    TransformModel = transformModel;
    Predictor = predictor;
    _roleMappings = roleMappings;
}
internal override void PrepareData(IHostEnvironment env, IDataView input, out RoleMappedData roleMappedData, out IPredictor predictor)
{
    Contracts.CheckValue(env, nameof(env));
    env.CheckValue(input, nameof(input));
    // Push the input through the transform model and wrap it with the role mappings
    // expected by the predictor.
    input = TransformModel.Apply(env, input);
    roleMappedData = new RoleMappedData(input, _roleMappings, opt: true);
    predictor = Predictor;
}
internal override void Save(IHostEnvironment env, Stream stream)
{
    Contracts.CheckValue(env, nameof(env));
    env.CheckValue(stream, nameof(stream));
    using (var ch = env.Start("Saving predictor model"))
    {
        // REVIEW: address the asymmetry in the way we're loading and saving the model.
        // Effectively, we have methods to load the transform model from a model.zip, but don't have
        // methods to compose the model.zip out of transform model, predictor and role mappings
        // (we use the TrainUtils.SaveModel that does all three).

        // Create the chain of transforms for saving.
        IDataView data = new EmptyDataView(env, TransformModel.InputSchema);
        data = TransformModel.Apply(env, data);
        var roleMappedData = new RoleMappedData(data, _roleMappings, opt: true);
        TrainUtils.SaveModel(env, ch, stream, Predictor, roleMappedData);
    }
}
public static CombineTransformModelsOutput CombineTransformModels(IHostEnvironment env, CombineTransformModelsInput input)
{
    Contracts.CheckValue(env, nameof(env));
    var host = env.Register("CombineTransformModels");
    host.CheckValue(input, nameof(input));
    EntryPointUtils.CheckInputArgs(host, input);
    host.CheckNonEmpty(input.Models, nameof(input.Models));

    // Compose the models from the last one backward.
    TransformModel model = input.Models[input.Models.Length - 1];
    for (int i = input.Models.Length - 2; i >= 0; i--)
        model = model.Apply(env, input.Models[i]);

    return new CombineTransformModelsOutput { OutputModel = model };
}
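// Illustrative sketch (not ML.NET source): the fold in CombineTransformModels starts from the last
// model and repeatedly calls model.Apply(env, earlierModel), so the composite ends up applying
// input.Models[0] first and the last model last, assuming TransformModel.Apply composes as
// "input first, then this" (as the Apply override below suggests). The standalone example mimics
// that ordering with plain delegates; CombineFoldSketch, ComposeStages, and the Func<int, int>
// stages are hypothetical stand-ins, not ML.NET types.
using System;

internal static class CombineFoldSketch
{
    // Folds right-to-left, mirroring the loop in CombineTransformModels: stages[0] runs first.
    internal static Func<int, int> ComposeStages(Func<int, int>[] stages)
    {
        var composite = stages[stages.Length - 1];
        for (int i = stages.Length - 2; i >= 0; i--)
        {
            var tail = composite;
            var head = stages[i];
            composite = x => tail(head(x)); // run the earlier stage, then the accumulated tail
        }
        return composite;
    }

    internal static void Main()
    {
        var stages = new Func<int, int>[] { x => x + 1, x => x * 10 };
        Console.WriteLine(ComposeStages(stages)(2)); // prints 30: (2 + 1) * 10, so stage 0 ran first
    }
}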
/// <summary>
/// Apply this transform model to the given input transform model to produce a composite transform model.
/// </summary>
internal override TransformModel Apply(IHostEnvironment env, TransformModel input)
{
    Contracts.CheckValue(env, nameof(env));
    env.CheckValue(input, nameof(input));

    IDataView view;
    Schema schemaRoot = input.InputSchema;
    var mod = input as TransformModelImpl;
    if (mod != null)
        view = ApplyTransformUtils.ApplyAllTransformsToData(env, _chain, mod._chain);
    else
    {
        view = new EmptyDataView(env, schemaRoot);
        view = input.Apply(env, view);
        view = Apply(env, view);
    }

    return new TransformModelImpl(env, schemaRoot, view);
}
internal abstract TransformModel Apply(IHostEnvironment env, TransformModel input);
internal PredictionModel(Stream stream)
{
    _env = new TlcEnvironment();
    _predictorModel = new Runtime.EntryPoints.TransformModel(_env, stream);
}
internal abstract PredictorModel Apply(IHostEnvironment env, TransformModel transformModel);