/// <summary>
/// Serializes this predictor model (transforms, predictor and role mappings) to the given stream.
/// </summary>
public void Save(IHostEnvironment env, Stream stream)
{
    Contracts.CheckValue(env, nameof(env));
    env.CheckValue(stream, nameof(stream));

    using (var ch = env.Start("Saving predictor model"))
    {
        // REVIEW: loading and saving remain asymmetric: we have methods to load the
        // transform model out of a model.zip, but no methods to compose a model.zip from
        // the transform model, predictor and role mappings separately — TrainUtils.SaveModel
        // does all three at once.

        // Rebuild the transform pipeline over an empty view so the whole chain can be persisted.
        IDataView pipeline = new EmptyDataView(env, _transformModel.InputSchema);
        pipeline = _transformModel.Apply(env, pipeline);
        var examples = new RoleMappedData(pipeline, _roleMappings, opt: true);
        TrainUtils.SaveModel(env, ch, stream, _predictor, examples);
    }
}
/// <summary>
/// Entry point that composes an array of transform models into a single transform model.
/// Models are folded from last to first, so the resulting model applies them in array order.
/// </summary>
public static CombineTransformModelsOutput CombineTransformModels(IHostEnvironment env, CombineTransformModelsInput input)
{
    Contracts.CheckValue(env, nameof(env));
    var host = env.Register("CombineTransformModels");
    host.CheckValue(input, nameof(input));
    EntryPointUtils.CheckInputArgs(host, input);
    host.CheckNonEmpty(input.Models, nameof(input.Models));

    var models = input.Models;
    // Start from the tail and successively apply the accumulated model to each earlier one.
    ITransformModel combined = models[models.Length - 1];
    int idx = models.Length - 2;
    while (idx >= 0)
    {
        combined = combined.Apply(env, models[idx]);
        idx--;
    }

    return new CombineTransformModelsOutput { OutputModel = combined };
}
/// <summary>
/// Apply this transform model to the given input transform model to produce a composite transform model.
/// </summary>
public ITransformModel Apply(IHostEnvironment env, ITransformModel input)
{
    Contracts.CheckValue(env, nameof(env));
    env.CheckValue(input, nameof(input));

    Schema schemaRoot = input.InputSchema;
    IDataView view;
    if (input is TransformModel concrete)
    {
        // Fast path: splice our chain directly onto the other concrete model's chain.
        view = ApplyTransformUtils.ApplyAllTransformsToData(env, _chain, concrete._chain);
    }
    else
    {
        // Generic path: materialize the input model over an empty view, then apply ourselves on top.
        view = input.Apply(env, new EmptyDataView(env, schemaRoot));
        view = Apply(env, view);
    }
    return new TransformModel(env, schemaRoot, view);
}
/// <summary>
/// Builds the ONNX graph for the loaded (or supplied) pipeline and optional predictor,
/// then writes the model in binary and/or JSON form to the configured output paths.
/// </summary>
/// <param name="ch">Channel for logging warnings and trace messages.</param>
private void Run(IChannel ch)
{
    IDataLoader loader = null;
    IPredictor rawPred = null;
    IDataView view;
    RoleMappedSchema trainSchema = null;

    if (_model == null)
    {
        if (string.IsNullOrEmpty(Args.InputModelFile))
        {
            loader = CreateLoader();
            rawPred = null;
            trainSchema = null;
            // Fixed typo in the user-facing message: "specifified" -> "specified".
            Host.CheckUserArg(Args.LoadPredictor != true, nameof(Args.LoadPredictor),
                "Cannot be set to true unless " + nameof(Args.InputModelFile) + " is also specified.");
        }
        else
            LoadModelObjects(ch, _loadPredictor, out rawPred, true, out trainSchema, out loader);
        view = loader;
    }
    else
        // A transform model was supplied directly: replay it over an empty view.
        view = _model.Apply(Host, new EmptyDataView(Host, _model.InputSchema));

    // Get the transform chain.
    IDataView source;
    IDataView end;
    LinkedList<ITransformCanSaveOnnx> transforms;
    GetPipe(ch, view, out source, out end, out transforms);
    Host.Assert(transforms.Count == 0 || transforms.Last.Value == end);

    var assembly = System.Reflection.Assembly.GetExecutingAssembly();
    var versionInfo = System.Diagnostics.FileVersionInfo.GetVersionInfo(assembly.Location);

    var ctx = new OnnxContextImpl(Host, _name, ProducerName, versionInfo.FileVersion, ModelVersion, _domain);

    // If we have a predictor, try to get the scorer for it.
    if (rawPred != null)
    {
        RoleMappedData data;
        if (trainSchema != null)
            data = RoleMappedData.Create(end, trainSchema.GetColumnRoleNames());
        else
        {
            // We had a predictor, but no roles stored in the model. Just suppose
            // default column names are OK, if present.
            data = TrainUtils.CreateExamplesOpt(end, DefaultColumnNames.Label,
                DefaultColumnNames.Features, DefaultColumnNames.GroupId,
                DefaultColumnNames.Weight, DefaultColumnNames.Name);
        }

        var scorePipe = ScoreUtils.GetScorer(rawPred, data, Host, trainSchema);
        var scoreOnnx = scorePipe as ITransformCanSaveOnnx;
        if (scoreOnnx?.CanSaveOnnx == true)
        {
            // The scorer becomes the new end of the pipe and is exported like any transform.
            Host.Assert(scorePipe.Source == end);
            end = scorePipe;
            transforms.AddLast(scoreOnnx);
        }
        else
        {
            Contracts.CheckUserArg(_loadPredictor != true, nameof(Arguments.LoadPredictor),
                "We were explicitly told to load the predictor but we do not know how to save it as ONNX.");
            ch.Warning("We do not know how to save the predictor as ONNX. Ignoring.");
        }
    }
    else
    {
        Contracts.CheckUserArg(_loadPredictor != true, nameof(Arguments.LoadPredictor),
            "We were explicitly told to load the predictor but one was not present.");
    }

    HashSet<string> inputColumns = new HashSet<string>();
    // Create graph inputs.
    for (int i = 0; i < source.Schema.ColumnCount; i++)
    {
        string colName = source.Schema.GetColumnName(i);
        if (_inputsToDrop.Contains(colName))
            continue;

        ctx.AddInputVariable(source.Schema.GetColumnType(i), colName);
        inputColumns.Add(colName);
    }

    // Create graph nodes, outputs and intermediate values.
    foreach (var trans in transforms)
    {
        Host.Assert(trans.CanSaveOnnx);
        trans.SaveAsOnnx(ctx);
    }

    // Add graph outputs. Hidden columns and explicitly dropped columns are skipped;
    // columns with no ONNX variable (never produced by the graph) are skipped too.
    for (int i = 0; i < end.Schema.ColumnCount; ++i)
    {
        if (end.Schema.IsHidden(i))
            continue;
        var idataviewColumnName = end.Schema.GetColumnName(i);
        if (_outputsToDrop.Contains(idataviewColumnName) || _inputsToDrop.Contains(idataviewColumnName))
            continue;

        var variableName = ctx.TryGetVariableName(idataviewColumnName);
        if (variableName != null)
            ctx.AddOutputVariable(end.Schema.GetColumnType(i), variableName);
    }

    var model = ctx.MakeModel();

    if (_outputModelPath != null)
    {
        using (var file = Host.CreateOutputFile(_outputModelPath))
        using (var stream = file.CreateWriteStream())
            model.WriteTo(stream);
    }

    if (_outputJsonModelPath != null)
    {
        using (var file = Host.CreateOutputFile(_outputJsonModelPath))
        using (var stream = file.CreateWriteStream())
        using (var writer = new StreamWriter(stream))
        {
            // Round-trip through Newtonsoft.Json to get an indented, human-readable dump.
            var parsedJson = JsonConvert.DeserializeObject(model.ToString());
            writer.Write(JsonConvert.SerializeObject(parsedJson, Formatting.Indented));
        }
    }

    if (!string.IsNullOrWhiteSpace(Args.OutputModelFile))
    {
        // NOTE(review): loader is only set on the _model == null path; this assert appears to
        // presume OutputModelFile is never combined with a directly-supplied model — confirm.
        Contracts.Assert(loader != null);
        ch.Trace("Saving the data pipe");
        // Should probably include "end"?
        SaveLoader(loader, Args.OutputModelFile);
    }
}