/// <summary>
/// Initializes the pipeline.
/// </summary>
/// <param name="transforms">list of transform settings, can be null or empty</param>
/// <param name="predictor">trainer settings for a predictor, can be null</param>
/// <param name="host">can be null too, a <see cref="ExtendedConsoleEnvironment"/> is then created</param>
public ScikitPipeline(string[] transforms = null, string predictor = null, IHostEnvironment host = null)
{
    _dispose = false;
    // NOTE(review): the cref above documents ExtendedConsoleEnvironment as a type and the
    // original bare call `ExtendedConsoleEnvironment()` would not compile unless a same-named
    // factory method exists elsewhere — instantiating explicitly; confirm no such helper exists.
    _env = host ?? new ExtendedConsoleEnvironment();

    // Slot 0 always holds a pass-through step so the data source can be swapped later.
    _transforms = new StepTransform[transforms == null ? 1 : transforms.Length + 1];
    _transforms[0] = new StepTransform() { transformSettings = "pass", transform = null };
    if (transforms != null)
    {
        for (int i = 0; i < transforms.Length; ++i)
            _transforms[i + 1] = new StepTransform() { transformSettings = transforms[i], transform = null };
    }

    // The predictor step is only a settings holder at this point; training fills the rest.
    _predictor = predictor == null
        ? null
        : new StepPredictor()
        {
            trainerSettings = predictor,
            predictor = null,
            trainer = null,
            roleMapData = null
        };
    _loaderSettings = null;
    _roles = null;
    _fastValueMapper = null;
    _fastValueMapperObject = null;
}
/// <summary>
/// Verifies that a StepTransform maps the parent step's value to the target type
/// and keeps a reference to the parent step's result.
/// Changed from `async void` to `async Task`: async void swallows exceptions and
/// test runners cannot await it, so failures could pass silently.
/// </summary>
public async Task StepTransform_Execute()
{
    // Arrange: a fake parent step that yields a known Teste1 value.
    var teste1 = new Teste1 { Id = 1, Value = "Value", Date = new DateTime(2000, 1, 1) };
    var stepMock = new Mock<IStep<Teste1>>();
    stepMock.Setup(s => s.Execute())
            .ReturnsAsync(() => new StepValue<Teste1>(teste1, null));

    var stepTransform = new StepTransform<Teste1, Teste2>(
        stepMock.Object,
        parent => Task.Run(() => new Teste2
        {
            Concat = $"{parent.Value.Id}{parent.Value.Value}{parent.Value.Date}"
        }));

    // Act
    var result = await stepTransform.Execute();

    // Assert: the transformed value carries the new type, and the parent result is preserved.
    result.TypeValue.Should().Be(typeof(Teste2));
    result.Value.Concat.Should().Be($"{teste1.Id}{teste1.Value}{teste1.Date}");
    result.Parent.TypeValue.Should().Be(typeof(Teste1));
    ((Teste1)result.Parent.Value).Should().BeEquivalentTo(teste1);
}
/// <summary>
/// Loads a pipeline saved in zip format.
/// Fixes malformed punctuation in the original (missing statement terminators leaving
/// stray empty statements, and an illegal `;` between `}` and `else`) without changing logic.
/// </summary>
protected void Load(Stream fs)
{
    var transformPipe = ModelFileUtils.LoadPipeline(_env, fs, new MultiFileSource(null), true);
    var pred = _env.LoadPredictorOrNull(fs);

    // Walk down the transform chain to its root (stop early if a PassThroughTransform is found).
    IDataView root;
    for (root = transformPipe; root is IDataTransform && !(root is PassThroughTransform); root = ((IDataTransform)root).Source)
        ;

    // Ensure the pipeline is rooted in a PassThroughTransform so the source can be replaced later.
    if (!(root is PassThroughTransform))
    {
        var tr = new PassThroughTransform(_env, new PassThroughTransform.Arguments(), root);
        transformPipe = ApplyTransformUtils.ApplyAllTransformsToData(_env, transformPipe, tr, root);
    }

    // Collect transforms sink-to-source, then reverse so _transforms runs source-to-sink.
    var stack = new List<IDataView>();
    for (root = transformPipe; root is IDataTransform; root = ((IDataTransform)root).Source)
        stack.Add(root);
    stack.Reverse();

    _transforms = new StepTransform[stack.Count];
    for (int i = 0; i < _transforms.Length; ++i)
        _transforms[i] = new StepTransform() { transform = stack[i] as IDataTransform, transformSettings = null };

    if (pred == null)
    {
        // No predictor in the stream: store an empty predictor step.
        _predictor = new StepPredictor() { predictor = null, roleMapData = null, trainer = null, trainerSettings = null };
    }
    else
    {
#pragma warning disable CS0618
        var ipred = pred.GetPredictorObject() as IPredictor;
#pragma warning restore CS0618
        // Role mappings are needed to rebuild the RoleMappedData the predictor was trained on.
        _roles = ModelFileUtils.LoadRoleMappingsOrNull(_env, fs).ToList();
        var data = new RoleMappedData(transformPipe, _roles);
        _predictor = new StepPredictor() { predictor = ipred, roleMapData = data, trainer = null, trainerSettings = null };
    }
    _fastValueMapper = null;
}
/// <summary>
/// Loads a pipeline saved in onnx format.
/// </summary>
protected void LoadOnnx(Stream fs)
{
    // Resulting chain: pass-through (replaceable source) -> transform read from the ONNX stream.
    var passThrough = new PassThroughTransform(_env, new PassThroughTransform.Arguments(), null);
    _transforms = new StepTransform[]
    {
        new StepTransform() { transform = passThrough as IDataTransform, transformSettings = null },
        new StepTransform() { transform = ConvertFromOnnx.ReadOnnx(fs, passThrough), transformSettings = null },
    };
    // An ONNX pipeline carries no trained predictor object; the predictor step stays empty.
    _predictor = new StepPredictor() { predictor = null, roleMapData = null, trainer = null, trainerSettings = null };
    _fastValueMapper = null;
}