/// <summary>
/// Decision Manager Constructor.
/// Each dependency that is passed as null (the default) is instantiated here
/// with the production implementation.
/// </summary>
/// <param name="surveyor">Conversion and table size storage unit.</param>
/// <param name="ricochetCalc">Ricochet calc unit. NOTE: working in mm.</param>
/// <param name="predictor">Predictor for ball coordinates.</param>
/// <param name="decisionTree">Full decision tree to make decisions per each rod.</param>
/// <param name="controlledRods">Rods to be controlled by manager.</param>
public DecisionManager(ISurveyor surveyor = null, IInitializableRicochet ricochetCalc = null,
    IPredictor predictor = null, IDecisionTree decisionTree = null,
    List<IInitializableRod> controlledRods = null)
{
    _surveyor = surveyor ?? new Surveyor();
    IInitializableRicochet calculator = ricochetCalc ?? new RicochetCalc(true, eUnits.Mm);
    _decisionTree = decisionTree ?? new FullDecisionTree(new PartialDecisionTree());
    _predictor = predictor ?? new Predictor(_surveyor, calculator);

    if (controlledRods != null)
    {
        // Caller supplied ready-made rods - use them as-is.
        _controlledRods = controlledRods;
    }
    else
    {
        // Otherwise build and initialize one control rod per rod type.
        _controlledRods = new List<IInitializableRod>();
        foreach (eRod rodType in Enum.GetValues(typeof(eRod)))
        {
            IInitializableRod rod = new ControlRod(rodType, _surveyor, calculator);
            rod.Initialize();
            _controlledRods.Add(rod);
        }
    }
}
/// <summary>
/// Lazily yields the prediction residual (actual sample minus the value the
/// predictor derives from the previous sample) for every sample from
/// <paramref name="predictorOrder"/> onwards.
/// </summary>
/// <param name="samples">The full sample block, warm-up samples included.</param>
/// <param name="predictorOrder">Number of leading warm-up samples to skip.</param>
/// <param name="predictor">Predictor producing the next expected sample.</param>
private IEnumerator<int> GetResidual(int[] samples, int predictorOrder, IPredictor predictor)
{
    // Seed with the last warm-up sample; 0 when there is no warm-up.
    int previous = predictorOrder > 0 ? samples[predictorOrder - 1] : 0;
    for (int index = predictorOrder; index < samples.Length; index++)
    {
        int current = samples[index];
        yield return current - predictor.Next(previous);
        previous = current;
    }
}
/// <summary>
/// Creates a tracker node with the given 6DOF device identifier (see InputMapper class
/// for the identifier strings).
/// </summary>
/// <param name="name">The name of this node</param>
/// <param name="deviceIdentifier">The 6DOF device identifier (see InputMapper class)</param>
/// <exception cref="GoblinException">If the given device identifier is not a 6DOF device</exception>
public TrackerNode(String name, String deviceIdentifier)
    : base(name)
{
    worldTransform = Matrix.Identity;
    this.deviceIdentifier = deviceIdentifier;

    // Only 6DOF input devices may drive a TrackerNode.
    if (!InputMapper.Instance.Contains6DOFInputDevice(deviceIdentifier))
    {
        throw new GoblinException(deviceIdentifier + " is not recognized. Only 6DOF devices " +
            "are allowed to be used with TrackerNode.");
    }

    // Smoothing and prediction are disabled until explicitly configured.
    smoother = null;
    smooth = false;
    predictor = null;
    predict = false;
}
/// <summary>
/// Writes the residual section of a FLAC subframe: a 2-bit coding method,
/// the 4-bit partition order, then one partition per 2^Order partitions,
/// each either Rice-coded or escaped to fixed-precision verbatim samples.
/// </summary>
/// <param name="writer">Bit-level output writer.</param>
/// <param name="predictor">Predictor used to derive the residual signal.</param>
/// <param name="predictorOrder">Number of warm-up samples excluded from the first partition.</param>
/// <param name="samples">The full sample block, warm-up samples included.</param>
internal void WriteResidual(FlacBitStreamWriter writer, IPredictor predictor, int predictorOrder, int[] samples)
{
    int totalPartitions = 1 << Order;
    // The block must split evenly into partitions, and the first partition must
    // still contain data after skipping the warm-up samples.
    System.Diagnostics.Debug.Assert((samples.Length % totalPartitions) == 0 &&
        (samples.Length / totalPartitions) > predictorOrder &&
        RiceParameters.Length == totalPartitions);
    // Coding method: 0 = 4-bit Rice parameters, 1 = 5-bit (extended) parameters.
    if (!IsExtended) writer.WriteUnsigned(0, 2);
    else writer.WriteUnsigned(1, 2);
    writer.WriteUnsigned((uint)Order, 4);
    // j starts at predictorOrder so the warm-up samples are skipped in the first
    // partition only; it is reset to 0 at the end of every partition.
    int j = predictorOrder;
    int samplePerPartition = samples.Length / totalPartitions;
    int encodingParameterPrecision = IsExtended ? 5 : 4;
    IEnumerator<int> residualData = GetResidual(samples, predictorOrder, predictor);
    for (int i = 0; i < totalPartitions; i++)
    {
        if (RiceParameters[i] >= 0)
        {
            // Rice-coded partition.
            int riceParameter = RiceParameters[i];
            writer.WriteUnsigned((uint)riceParameter, encodingParameterPrecision);
            while (j++ < samplePerPartition)
            {
                if (!residualData.MoveNext())
                    throw new FlacException("Invalid amount of residual data");
                writer.WriteRice(residualData.Current, riceParameter);
            }
        }
        else // escaped
        {
            // Escape: all-ones parameter, then raw signed samples at a fixed
            // precision. A negative RiceParameters entry stores that precision
            // as its bitwise complement.
            writer.WriteUnsigned(~0U, encodingParameterPrecision);
            int samplePrecision = ~RiceParameters[i];
            writer.WriteUnsigned((uint)samplePrecision, 5);
            while (j++ < samplePerPartition)
            {
                if (!residualData.MoveNext())
                    throw new FlacException("Invalid amount of residual data");
                writer.WriteSigned(residualData.Current, samplePrecision);
            }
        }
        j = 0;
    }
}
/// <summary>
/// One-time test-class setup: creates NSubstitute mocks for every DecisionManager
/// dependency and collects the four rod mocks into the shared rod list.
/// </summary>
public static void DecisionManager_ClassInitialize(TestContext context)
{
    _surveyorMock = Substitute.For<ISurveyor>();
    _ricochetCalcMock = Substitute.For<IInitializableRicochet>();
    _predictorMock = Substitute.For<IPredictor>();
    _decisionTreeMock = Substitute.For<IDecisionTree>();

    // One mock per rod position.
    _rodGoalKeaperMock = Substitute.For<IInitializableRod>();
    _rodDefenceMock = Substitute.For<IInitializableRod>();
    _rodMidFieldMock = Substitute.For<IInitializableRod>();
    _rodAttackMock = Substitute.For<IInitializableRod>();
    _rodMockList = new List<IInitializableRod>
    {
        _rodGoalKeaperMock,
        _rodDefenceMock,
        _rodMidFieldMock,
        _rodAttackMock
    };
}
/// <summary>
/// Scores every game with the predictor, accumulates squared/absolute/sign error
/// into the class-level totals, and prints per-game and aggregate error metrics.
/// </summary>
/// <param name="games">Games to evaluate.</param>
/// <param name="predictor">Predictor producing a goal-difference estimate per game.</param>
private static void PrintMseError(IEnumerable<Game> games, IPredictor predictor)
{
    foreach (Game game in games)
    {
        double predicted = predictor.Predict(game);
        double error = predicted - game.GoalDiff;
        totalSquaredError += error * error;
        totalLinearError += Math.Abs(error);

        int actualSign = Math.Sign(game.GoalDiff);
        if (actualSign == 0)
            totalSignError += 0.5d;           // a draw counts as half a sign miss
        else if (Math.Sign(predicted) != actualSign)
            totalSignError += 1d;

        n++;
        Console.WriteLine("Predicted {0:0.00} was {1:0.00}", predicted, game.GoalDiff);
    }

    Console.WriteLine("MSE {0:0.00}", totalSquaredError / (double)n);
    Console.WriteLine("MLE {0:0.00}", totalLinearError / (double)n);
    Console.WriteLine("Total Sign Error {0:0.00}", totalSignError / (double)n);
}
/// <summary>
/// Trains a binary prediction transformer on <paramref name="trainData"/>,
/// optionally continuing from <paramref name="initialPredictor"/>.
/// </summary>
public BinaryPredictionTransformer<LinearBinaryModelParameters> Train(IDataView trainData, IPredictor initialPredictor = null)
{
    return TrainTransformer(trainData, initPredictor: initialPredictor);
}
/// <summary>
/// Round-trips the predictor through an in-memory model save/load, then builds a
/// scorer over a test pipeline loaded from <paramref name="testDataPath"/>.
/// </summary>
private IDataScorerTransform GetScorer(IHostEnvironment env, IDataView transforms, IPredictor pred, string testDataPath = null)
{
    using (var channel = env.Start("Saving model"))
    using (var modelStream = new MemoryStream())
    {
        // Model cannot be saved with CacheDataView
        var trainRoles = new RoleMappedData(transforms, label: "Label", feature: "Features");
        TrainUtils.SaveModel(env, channel, modelStream, pred, trainRoles);
        modelStream.Position = 0;
        using (var repository = RepositoryReader.Open(modelStream, channel))
        {
            IDataLoader testPipe = ModelFileUtils.LoadLoader(env, repository, new MultiFileSource(testDataPath), true);
            var testRoles = new RoleMappedData(testPipe, label: "Label", feature: "Features");
            return ScoreUtils.GetScorer(pred, testRoles, env, testRoles.Schema);
        }
    }
}
/// <summary>
/// Runs the cross-validation command: builds the data pipeline (optionally adding
/// a generated name column), trains/evaluates one model per fold via FoldHelper,
/// prints per-fold and overall metrics, and saves per-instance results on request.
/// </summary>
/// <param name="ch">Channel for logging and warnings.</param>
/// <param name="cmd">The raw command line, forwarded to the fold helper.</param>
private void RunCore(IChannel ch, string cmd)
{
    Host.AssertValue(ch);
    // Optionally warm-start from a previously saved predictor.
    IPredictor inputPredictor = null;
    if (Args.ContinueTrain && !TrainUtils.TryLoadPredictor(ch, Host, Args.InputModelFile, out inputPredictor))
    {
        ch.Warning("No input model file specified or model file did not contain a predictor. The model state cannot be initialized.");
    }
    ch.Trace("Constructing data pipeline");
    IDataLoader loader = CreateRawLoader();
    // If the per-instance results are requested and there is no name column, add a GenerateNumberTransform.
    var preXf = Args.PreTransform;
    if (!string.IsNullOrEmpty(Args.OutputDataFile))
    {
        string name = TrainUtils.MatchNameOrDefaultOrNull(ch, loader.Schema, nameof(Args.NameColumn), Args.NameColumn, DefaultColumnNames.Name);
        if (name == null)
        {
            // Synthesize a counter-based Name column so per-instance rows are identifiable.
            preXf = preXf.Concat(
                new[]
                {
                    new KeyValuePair<string, IComponentFactory<IDataView, IDataTransform>>(
                        "", ComponentFactoryUtils.CreateFromFunction<IDataView, IDataTransform>(
                            (env, input) =>
                            {
                                var args = new GenerateNumberTransform.Arguments();
                                args.Column = new[] { new GenerateNumberTransform.Column() { Name = DefaultColumnNames.Name }, };
                                args.UseCounter = true;
                                return (new GenerateNumberTransform(env, args, input));
                            }))
                }).ToArray();
        }
    }
    loader = CompositeDataLoader.Create(Host, loader, preXf);
    ch.Trace("Binding label and features columns");
    IDataView pipe = loader;
    var stratificationColumn = GetSplitColumn(ch, loader, ref pipe);
    var scorer = Args.Scorer;
    var evaluator = Args.Evaluator;
    Func<IDataView> validDataCreator = null;
    if (Args.ValidationFile != null)
    {
        validDataCreator = () =>
        {
            // Fork the command.
            var impl = new CrossValidationCommand(this);
            return (impl.CreateRawLoader(dataFile: Args.ValidationFile));
        };
    }
    // FoldHelper owns the per-fold train/score/evaluate work.
    FoldHelper fold = new FoldHelper(Host, RegistrationName, pipe, stratificationColumn, Args, CreateRoleMappedData, ApplyAllTransformsToData, scorer, evaluator, validDataCreator, ApplyAllTransformsToData, inputPredictor, cmd, loader, !string.IsNullOrEmpty(Args.OutputDataFile));
    var tasks = fold.GetCrossValidationTasks();
    var eval = evaluator?.CreateComponent(Host) ?? EvaluateUtils.GetEvaluator(Host, tasks[0].Result.ScoreSchema);
    // Print confusion matrix and fold results for each fold.
    for (int i = 0; i < tasks.Length; i++)
    {
        var dict = tasks[i].Result.Metrics;
        MetricWriter.PrintWarnings(ch, dict);
        eval.PrintFoldResults(ch, dict);
    }
    // Print the overall results.
    if (!TryGetOverallMetrics(tasks.Select(t => t.Result.Metrics).ToArray(), out var overallList))
    {
        throw ch.Except("No overall metrics found");
    }
    var overall = eval.GetOverallResults(overallList.ToArray());
    MetricWriter.PrintOverallMetrics(Host, ch, Args.SummaryFilename, overall, Args.NumFolds);
    eval.PrintAdditionalMetrics(ch, tasks.Select(t => t.Result.Metrics).ToArray());
    Dictionary<string, IDataView>[] metricValues = tasks.Select(t => t.Result.Metrics).ToArray();
    SendTelemetryMetric(metricValues);
    // Save the per-instance results.
    if (!string.IsNullOrWhiteSpace(Args.OutputDataFile))
    {
        var perInstance = EvaluateUtils.ConcatenatePerInstanceDataViews(Host, eval, Args.CollateMetrics, Args.OutputExampleFoldIndex, tasks.Select(t => t.Result.PerInstanceResults).ToArray(), out var variableSizeVectorColumnNames);
        if (variableSizeVectorColumnNames.Length > 0)
        {
            ch.Warning("Detected columns of variable length: {0}. Consider setting collateMetrics- for meaningful per-Folds results.", string.Join(", ", variableSizeVectorColumnNames));
        }
        if (Args.CollateMetrics)
        {
            ch.Assert(perInstance.Length == 1);
            MetricWriter.SavePerInstance(Host, ch, Args.OutputDataFile, perInstance[0]);
        }
        else
        {
            // One output file per fold.
            int i = 0;
            foreach (var idv in perInstance)
            {
                MetricWriter.SavePerInstance(Host, ch, ConstructPerFoldName(Args.OutputDataFile, i), idv);
                i++;
            }
        }
    }
}
/// <summary>
/// Per-test setup: constructs a fresh Predictor under test from the shared mocks.
/// </summary>
public void ControlRod_TestInitialize()
{
    _testAsset = new Predictor(_surveyorMock, _ricochetMock);
}
/// <summary>
/// Searches the allowed Rice partition orders for the residual encoding with the
/// smallest estimated size.
/// Fixes two defects in the original: an unused local (<c>int j = order;</c>) is
/// removed, and <c>samplesPerPartition</c> is now reset to the full block length
/// when falling back to partition order 0, so the size estimate covers the whole
/// block instead of a stale fraction of it.
/// </summary>
/// <param name="channelSamples">Samples of one channel, warm-up samples included.</param>
/// <param name="order">Predictor order; the first <paramref name="order"/> samples are warm-up.</param>
/// <param name="predictor">Predictor used to derive the residual signal.</param>
/// <param name="policy">Encoding policy constraining the Rice partition order range.</param>
/// <returns>The candidate coefficients with the smallest estimated encoded size.</returns>
private FlacResidualCoefficeints FindBestResidual(int[] channelSamples, int order, IPredictor predictor, FlacEncodingPolicy policy)
{
    // Residual = sample - prediction from the previous sample; warm-up samples are skipped.
    int[] residual;
    if (order > 0)
    {
        residual = new int[channelSamples.Length];
        int lastSample = channelSamples[order - 1];
        for (int i = order; i < residual.Length; i++)
        {
            int nextSample = channelSamples[i];
            residual[i] = nextSample - predictor.Next(lastSample);
            lastSample = nextSample;
        }
    }
    else
    {
        residual = channelSamples;
    }

    int minRiceOrder = policy.RicePartionOrder.MinValue;
    int maxRiceOrder = policy.RicePartionOrder.MaxValue;
    List<FlacResidualCoefficeints> rices = new List<FlacResidualCoefficeints>();

    int samplesPerPartition = channelSamples.Length >> minRiceOrder;
    if (samplesPerPartition << minRiceOrder != channelSamples.Length)
    {
        // Block size is not divisible by 2^minRiceOrder: only order 0 is usable.
        minRiceOrder = maxRiceOrder = 0;
        // Reset the partition size too - a single partition spans the whole block.
        samplesPerPartition = channelSamples.Length;
    }

    for (int riceOrder = minRiceOrder; riceOrder <= maxRiceOrder; riceOrder++)
    {
        // Each partition must contain more samples than the predictor order.
        if (samplesPerPartition <= order)
            break;

        int partitionCount = 1 << riceOrder;
        int[] parameters = new int[partitionCount];
        int totalPartitionDataSize = 0;
        for (int i = 0; i < partitionCount; i++)
        {
            // Warm-up samples are excluded from the first partition only.
            int skipAmount = i == 0 ? order : 0;
            int estimatedPartitionSize;
            int riceParameter;
            FindBestResidual(residual, samplesPerPartition * i + skipAmount, samplesPerPartition - skipAmount,
                out estimatedPartitionSize, out riceParameter);
            totalPartitionDataSize += estimatedPartitionSize;
            parameters[i] = riceParameter;
        }

        const int NormalPrecision = 4;
        const int ExtendedPrecision = 5;
        const int MinValueForExtendedParameters = 15;

        // Parameters >= 15 do not fit in 4 bits, forcing the extended (5-bit) encoding.
        bool isExtended = Array.Exists(parameters, x => x >= MinValueForExtendedParameters);
        int totalSize = 4 + totalPartitionDataSize +
            partitionCount * (isExtended ? ExtendedPrecision : NormalPrecision);

        FlacResidualCoefficeints rice = new FlacResidualCoefficeints();
        rice.EstimatedSize = totalSize;
        rice.IsExtended = isExtended;
        rice.RiceParameters = parameters;
        rice.Order = riceOrder;
        rices.Add(rice);

        // Doubling the partition count requires an even partition size.
        if ((samplesPerPartition & 1) != 0)
            break;
        samplesPerPartition >>= 1;
    }

    // Pick the candidate with the smallest estimated size.
    int bestRicePartition = 0;
    for (int i = 1; i < rices.Count; i++)
    {
        if (rices[bestRicePartition].EstimatedSize > rices[i].EstimatedSize)
            bestRicePartition = i;
    }
    return rices[bestRicePartition];
}
// Factory method for SignatureBindableMapper.
// Validates that the predictor can produce feature contributions, then wraps it
// in a BindableMapper configured from the given arguments.
private static ISchemaBindableMapper Create(IHostEnvironment env, Arguments args, IPredictor predictor)
{
    Contracts.CheckValue(env, nameof(env));
    env.CheckValue(predictor, nameof(predictor));

    var contributionMapper = predictor as IFeatureContributionMapper;
    env.CheckParam(contributionMapper != null, nameof(predictor), "Predictor doesn't support getting feature contributions");
    return new BindableMapper(env, contributionMapper, args.Top, args.Bottom, args.Normalize, args.Stringify);
}
/// <summary>
/// Trains a Poisson regression transformer on <paramref name="trainData"/>,
/// optionally continuing from <paramref name="initialPredictor"/>.
/// </summary>
public RegressionPredictionTransformer<PoissonRegressionModelParameters> Train(IDataView trainData, IPredictor initialPredictor = null)
{
    return TrainTransformer(trainData, initPredictor: initialPredictor);
}
/// <summary>
/// Creates a node that is tracked by fiducial marker (can be either an array or
/// a single marker) and updated automatically.
/// </summary>
/// <param name="name">Name of this marker node</param>
/// <param name="tracker">A marker tracker used to track this fiducial marker</param>
/// <param name="markerConfigs">A list of configs that specify the fiducial marker
/// (can be either an array or a single marker) to look for</param>
public MarkerNode(String name, IMarkerTracker tracker, params Object[] markerConfigs)
    : base(name)
{
    this.tracker = tracker;
    // Register the marker configuration with the tracker, when one is supplied.
    if (tracker != null)
    {
        markerID = tracker.AssociateMarker(markerConfigs);
        this.markerConfigs = markerConfigs;
    }

    // Tracking state defaults.
    found = false;
    dropout = 0;
    maxDropouts = 5;
    prevMatrix = Matrix.Identity;
    inverseCameraView = Matrix.Identity;

    // Optimization, smoothing and prediction all start disabled.
    optimize = false;
    smoother = null;
    smooth = false;
    predictor = null;
    predict = false;
    predictionTime = 0;
}
/// <summary>
/// Builds a MultiToBinaryPredictor from per-class scalar predictors and an
/// optional reclassification predictor.
/// </summary>
internal static MultiToBinaryPredictor Create<TLabel>(IHost host, VBuffer<TLabel> classes,
    TScalarPredictor[] predictors, IPredictor reclassPredictor, bool singleColumn, bool labelKey)
{
    IImplBase implementation;
    using (var channel = host.Start("Creating MultiToBinary predictor"))
    {
        implementation = new ImplRaw<TLabel>(classes, predictors, reclassPredictor, singleColumn, labelKey);
    }
    return new MultiToBinaryPredictor(host, implementation);
}
/// <summary>
/// Wraps a trained predictor together with the transform model derived from the
/// training data and the role mappings captured from the training schema.
/// </summary>
internal PredictorModelImpl(IHostEnvironment env, RoleMappedData trainingData, IDataView startingData, IPredictor predictor)
{
    // Argument checks preserved in the original order.
    Contracts.CheckValue(env, nameof(env));
    env.CheckValue(trainingData, nameof(trainingData));
    env.CheckValue(predictor, nameof(predictor));

    TransformModel = new TransformModelImpl(env, trainingData.Data, startingData);
    _roleMappings = trainingData.Schema.GetColumnRoleNames().ToArray();
    Predictor = predictor;
}
/// <summary>
/// Wraps <paramref name="basePredictor"/> with a shift amount — presumably applied
/// to its predictions elsewhere in this class; confirm in the Predict implementation.
/// NOTE(review): the type name is misspelled ("Pedictor"); renaming would break
/// callers, so it is left as-is here.
/// </summary>
public LeftShiftPedictor(IPredictor basePredictor, int shift)
{
    this.shift = shift;
    this.basePredictor = basePredictor;
}
/// <summary>
/// Factory for a tree-ensemble featurizer bindable mapper over the given predictor.
/// </summary>
public static ISchemaBindableMapper Create(IHostEnvironment env, TreeEnsembleFeaturizerBindableMapper.Arguments args, IPredictor predictor)
{
    // Validate inputs in the original order before constructing the mapper.
    Contracts.CheckValue(env, nameof(env));
    env.CheckValue(args, nameof(args));
    env.CheckValue(predictor, nameof(predictor));

    return new TreeEnsembleFeaturizerBindableMapper(env, args, predictor);
}
/// <summary>
/// Converts the loaded (or supplied) model pipeline to ONNX: resolves the input
/// view, collects the transform chain, optionally appends the scorer, emits graph
/// inputs/nodes/outputs, and writes the ONNX model (plus optional JSON dump and
/// re-saved data pipe).
/// Fix: the user-facing error message typo "specifified" is corrected to "specified".
/// </summary>
/// <param name="ch">Channel for logging, warnings and assertions.</param>
private void Run(IChannel ch)
{
    IDataLoader loader = null;
    IPredictor rawPred = null;
    IDataView view;
    RoleMappedSchema trainSchema = null;

    // Resolve the source view: either from an explicit model, a model file, or a raw loader.
    if (_model == null)
    {
        if (string.IsNullOrEmpty(Args.InputModelFile))
        {
            loader = CreateLoader();
            rawPred = null;
            trainSchema = null;
            Host.CheckUserArg(Args.LoadPredictor != true, nameof(Args.LoadPredictor),
                "Cannot be set to true unless " + nameof(Args.InputModelFile) + " is also specified.");
        }
        else
        {
            LoadModelObjects(ch, _loadPredictor, out rawPred, true, out trainSchema, out loader);
        }
        view = loader;
    }
    else
    {
        view = _model.Apply(Host, new EmptyDataView(Host, _model.InputSchema));
    }

    // Create the ONNX context for storing global information
    var assembly = System.Reflection.Assembly.GetExecutingAssembly();
    var versionInfo = System.Diagnostics.FileVersionInfo.GetVersionInfo(assembly.Location);
    var ctx = new OnnxContextImpl(Host, _name, ProducerName, versionInfo.FileVersion,
        ModelVersion, _domain, Args.OnnxVersion);

    // Get the transform chain.
    IDataView source;
    IDataView end;
    LinkedList<ITransformCanSaveOnnx> transforms;
    GetPipe(ctx, ch, view, out source, out end, out transforms);
    Host.Assert(transforms.Count == 0 || transforms.Last.Value == end);

    // If we have a predictor, try to get the scorer for it.
    if (rawPred != null)
    {
        RoleMappedData data;
        if (trainSchema != null)
        {
            data = new RoleMappedData(end, trainSchema.GetColumnRoleNames());
        }
        else
        {
            // We had a predictor, but no roles stored in the model. Just suppose
            // default column names are OK, if present.
            data = new RoleMappedData(end, DefaultColumnNames.Label, DefaultColumnNames.Features,
                DefaultColumnNames.GroupId, DefaultColumnNames.Weight, DefaultColumnNames.Name, opt: true);
        }
        var scorePipe = ScoreUtils.GetScorer(rawPred, data, Host, trainSchema);
        var scoreOnnx = scorePipe as ITransformCanSaveOnnx;
        if (scoreOnnx?.CanSaveOnnx(ctx) == true)
        {
            // The scorer becomes the tail of the transform chain.
            Host.Assert(scorePipe.Source == end);
            end = scorePipe;
            transforms.AddLast(scoreOnnx);
        }
        else
        {
            Contracts.CheckUserArg(_loadPredictor != true, nameof(Arguments.LoadPredictor),
                "We were explicitly told to load the predictor but we do not know how to save it as ONNX.");
            ch.Warning("We do not know how to save the predictor as ONNX. Ignoring.");
        }
    }
    else
    {
        Contracts.CheckUserArg(_loadPredictor != true, nameof(Arguments.LoadPredictor),
            "We were explicitly told to load the predictor but one was not present.");
    }

    HashSet<string> inputColumns = new HashSet<string>();
    //Create graph inputs.
    for (int i = 0; i < source.Schema.ColumnCount; i++)
    {
        string colName = source.Schema.GetColumnName(i);
        if (_inputsToDrop.Contains(colName))
        {
            continue;
        }
        ctx.AddInputVariable(source.Schema.GetColumnType(i), colName);
        inputColumns.Add(colName);
    }

    //Create graph nodes, outputs and intermediate values.
    foreach (var trans in transforms)
    {
        Host.Assert(trans.CanSaveOnnx(ctx));
        trans.SaveAsOnnx(ctx);
    }

    //Add graph outputs.
    for (int i = 0; i < end.Schema.ColumnCount; ++i)
    {
        if (end.Schema.IsHidden(i))
        {
            continue;
        }
        var idataviewColumnName = end.Schema.GetColumnName(i);
        // Since the last IDataView also contains columns of the initial IDataView, last IDataView's columns found in
        // _inputToDrop should be removed too.
        if (_inputsToDrop.Contains(idataviewColumnName) || _outputsToDrop.Contains(idataviewColumnName))
        {
            continue;
        }
        // Route each surviving column through an Identity node so the graph output
        // has a stable variable name.
        var variableName = ctx.TryGetVariableName(idataviewColumnName);
        var trueVariableName = ctx.AddIntermediateVariable(null, idataviewColumnName, true);
        ctx.CreateNode("Identity", variableName, trueVariableName, ctx.GetNodeName("Identity"), "");
        ctx.AddOutputVariable(end.Schema.GetColumnType(i), trueVariableName);
    }

    var model = ctx.MakeModel();
    using (var file = Host.CreateOutputFile(_outputModelPath))
    using (var stream = file.CreateWriteStream())
        model.WriteTo(stream);

    if (_outputJsonModelPath != null)
    {
        using (var file = Host.CreateOutputFile(_outputJsonModelPath))
        using (var stream = file.CreateWriteStream())
        using (var writer = new StreamWriter(stream))
        {
            // Pretty-print the model as JSON for inspection.
            var parsedJson = JsonConvert.DeserializeObject(model.ToString());
            writer.Write(JsonConvert.SerializeObject(parsedJson, Formatting.Indented));
        }
    }

    if (!string.IsNullOrWhiteSpace(Args.OutputModelFile))
    {
        Contracts.Assert(loader != null);
        ch.Trace("Saving the data pipe");
        // Should probably include "end"?
        SaveLoader(loader, Args.OutputModelFile);
    }
}
/// <summary>
/// Derives an initial weight vector from a previously trained predictor —
/// presumably used to warm-start training from an existing model; confirm
/// against the concrete overrides.
/// </summary>
/// <param name="srcPredictor">The source predictor to take initial weights from.</param>
/// <returns>The weight vector derived from <paramref name="srcPredictor"/>.</returns>
private protected abstract VBuffer <float> InitializeWeightsFromPredictor(IPredictor srcPredictor);
/// <summary>
/// Runs the train-test command: constructs the trainer and training pipeline,
/// binds columns, optionally wires validation/test data into training, trains the
/// predictor, saves the model, then scores and evaluates on the test file and
/// prints/saves metrics.
/// </summary>
/// <param name="ch">Channel for logging and warnings.</param>
/// <param name="cmd">The raw command line, stored alongside the saved model.</param>
private void RunCore(IChannel ch, string cmd)
{
    Host.AssertValue(ch);
    Host.AssertNonEmpty(cmd);
    ch.Trace("Constructing trainer");
    ITrainer trainer = ImplOptions.Trainer.CreateComponent(Host);
    // Optionally warm-start from a previously saved predictor.
    IPredictor inputPredictor = null;
    if (ImplOptions.ContinueTrain && !TrainUtils.TryLoadPredictor(ch, Host, ImplOptions.InputModelFile, out inputPredictor))
    {
        ch.Warning("No input model file specified or model file did not contain a predictor. The model state cannot be initialized.");
    }
    ch.Trace("Constructing the training pipeline");
    IDataView trainPipe = CreateLoader();
    // Resolve column names, falling back to the conventional defaults.
    var schema = trainPipe.Schema;
    string label = TrainUtils.MatchNameOrDefaultOrNull(ch, schema, nameof(Arguments.LabelColumn), ImplOptions.LabelColumn, DefaultColumnNames.Label);
    string features = TrainUtils.MatchNameOrDefaultOrNull(ch, schema, nameof(Arguments.FeatureColumn), ImplOptions.FeatureColumn, DefaultColumnNames.Features);
    string group = TrainUtils.MatchNameOrDefaultOrNull(ch, schema, nameof(Arguments.GroupColumn), ImplOptions.GroupColumn, DefaultColumnNames.GroupId);
    string weight = TrainUtils.MatchNameOrDefaultOrNull(ch, schema, nameof(Arguments.WeightColumn), ImplOptions.WeightColumn, DefaultColumnNames.Weight);
    string name = TrainUtils.MatchNameOrDefaultOrNull(ch, schema, nameof(Arguments.NameColumn), ImplOptions.NameColumn, DefaultColumnNames.Name);
    TrainUtils.AddNormalizerIfNeeded(Host, ch, trainer, ref trainPipe, features, ImplOptions.NormalizeFeatures);
    ch.Trace("Binding columns");
    var customCols = TrainUtils.CheckAndGenerateCustomColumns(ch, ImplOptions.CustomColumns);
    var data = new RoleMappedData(trainPipe, label, features, group, weight, name, customCols);
    // Optional validation data, run through the same transforms as training data.
    RoleMappedData validData = null;
    if (!string.IsNullOrWhiteSpace(ImplOptions.ValidationFile))
    {
        if (!trainer.Info.SupportsValidation)
        {
            ch.Warning("Ignoring validationFile: Trainer does not accept validation dataset.");
        }
        else
        {
            ch.Trace("Constructing the validation pipeline");
            IDataView validPipe = CreateRawLoader(dataFile: ImplOptions.ValidationFile);
            validPipe = ApplyTransformUtils.ApplyAllTransformsToData(Host, trainPipe, validPipe);
            validData = new RoleMappedData(validPipe, data.Schema.GetColumnRoleNames());
        }
    }
    // In addition to the training set, some trainers can accept two data sets, validation set and test set,
    // in training phase. The major difference between validation set and test set is that training process may
    // indirectly use validation set to improve the model but the learned model should totally independent of test set.
    // Similar to validation set, the trainer can report the scores computed using test set.
    RoleMappedData testDataUsedInTrainer = null;
    if (!string.IsNullOrWhiteSpace(ImplOptions.TestFile))
    {
        // In contrast to the if-else block for validation above, we do not throw a warning if test file is provided
        // because this is TrainTest command.
        if (trainer.Info.SupportsTest)
        {
            ch.Trace("Constructing the test pipeline");
            IDataView testPipeUsedInTrainer = CreateRawLoader(dataFile: ImplOptions.TestFile);
            testPipeUsedInTrainer = ApplyTransformUtils.ApplyAllTransformsToData(Host, trainPipe, testPipeUsedInTrainer);
            testDataUsedInTrainer = new RoleMappedData(testPipeUsedInTrainer, data.Schema.GetColumnRoleNames());
        }
    }
    var predictor = TrainUtils.Train(Host, ch, data, trainer, validData,
        ImplOptions.Calibrator, ImplOptions.MaxCalibrationExamples, ImplOptions.CacheData, inputPredictor, testDataUsedInTrainer);
    // Save the model (to a temp file when no output model file was requested) and
    // reload the loader from it to build the testing pipeline.
    ILegacyDataLoader testPipe;
    bool hasOutfile = !string.IsNullOrEmpty(ImplOptions.OutputModelFile);
    var tempFilePath = hasOutfile ? null : Path.Combine(((IHostEnvironmentInternal)Host).TempFilePath, Path.GetRandomFileName());
    using (var file = new SimpleFileHandle(ch, hasOutfile ? ImplOptions.OutputModelFile : tempFilePath, true, !hasOutfile))
    {
        TrainUtils.SaveModel(Host, ch, file, predictor, data, cmd);
        ch.Trace("Constructing the testing pipeline");
        using (var stream = file.OpenReadStream())
        using (var rep = RepositoryReader.Open(stream, ch))
            testPipe = LoadLoader(rep, ImplOptions.TestFile, true);
    }
    // Score.
    ch.Trace("Scoring and evaluating");
    ch.Assert(ImplOptions.Scorer == null || ImplOptions.Scorer is ICommandLineComponentFactory, "TrainTestCommand should only be used from the command line.");
    IDataScorerTransform scorePipe = ScoreUtils.GetScorer(ImplOptions.Scorer, predictor, testPipe, features, group, customCols, Host, data.Schema);
    // Evaluate.
    var evaluator = ImplOptions.Evaluator?.CreateComponent(Host) ??
        EvaluateUtils.GetEvaluator(Host, scorePipe.Schema);
    var dataEval = new RoleMappedData(scorePipe, label, features, group, weight, name, customCols, opt: true);
    var metrics = evaluator.Evaluate(dataEval);
    MetricWriter.PrintWarnings(ch, metrics);
    evaluator.PrintFoldResults(ch, metrics);
    if (!metrics.TryGetValue(MetricKinds.OverallMetrics, out var overall))
    {
        throw ch.Except("No overall metrics found");
    }
    overall = evaluator.GetOverallResults(overall);
    MetricWriter.PrintOverallMetrics(Host, ch, ImplOptions.SummaryFilename, overall, 1);
    evaluator.PrintAdditionalMetrics(ch, metrics);
    Dictionary<string, IDataView>[] metricValues = { metrics };
    SendTelemetryMetric(metricValues);
    // Optionally save per-instance metrics.
    if (!string.IsNullOrWhiteSpace(ImplOptions.OutputDataFile))
    {
        var perInst = evaluator.GetPerInstanceMetrics(dataEval);
        var perInstData = new RoleMappedData(perInst, label, null, group, weight, name, customCols);
        var idv = evaluator.GetPerInstanceDataViewToSave(perInstData);
        MetricWriter.SavePerInstance(Host, ch, ImplOptions.OutputDataFile, idv);
    }
}
/// <summary>
/// Reconstructs the decoded samples of a block: warm-up samples are emitted
/// verbatim, then each following sample is the predictor output for the previous
/// sample plus the next residual value.
/// </summary>
/// <param name="blockSize">Total number of samples to produce.</param>
/// <param name="warmupSamples">Samples emitted as-is before prediction starts.</param>
/// <param name="predictor">Predictor producing the expected next sample.</param>
/// <param name="residualData">Residual stream; must contain exactly the remaining samples.</param>
/// <exception cref="FlacException">Residual stream is shorter or longer than required.</exception>
private IEnumerable<int> GetPredictorSamples(int blockSize, int[] warmupSamples, IPredictor predictor, IEnumerator<int> residualData)
{
    int previous = 0;
    foreach (int warmup in warmupSamples)
        yield return warmup;
    if (warmupSamples.Length > 0)
        previous = warmupSamples[warmupSamples.Length - 1];

    for (int index = warmupSamples.Length; index < blockSize; index++)
    {
        if (!residualData.MoveNext())
            throw new FlacException("Not enough residual data");
        int reconstructed = predictor.Next(previous) + residualData.Current;
        yield return reconstructed;
        previous = reconstructed;
    }

    // The residual stream must be fully consumed by now.
    if (residualData.MoveNext())
        throw new FlacException("Not all residual data is decoded");
}
/// <summary>
/// Resolves the default scorer component and mapper for <paramref name="predictor"/>
/// and instantiates the scorer over the given data.
/// </summary>
public static IDataScorerTransform GetScorer(IPredictor predictor, RoleMappedData data, IHostEnvironment env, RoleMappedSchema trainSchema)
{
    var scorerFactory = GetScorerComponentAndMapper(predictor, null, data.Schema, env, null, out var boundMapper);
    return scorerFactory.CreateComponent(env, data.Data, boundMapper, trainSchema);
}
/// <summary>
/// Trains a calibrated binary prediction transformer on <paramref name="trainData"/>,
/// optionally continuing from <paramref name="initialPredictor"/>.
/// </summary>
public BinaryPredictionTransformer<ParameterMixingCalibratedPredictor> Train(IDataView trainData, IPredictor initialPredictor = null)
{
    return TrainTransformer(trainData, initPredictor: initialPredictor);
}
/// <summary>
/// Applies the stored transform model to <paramref name="input"/>, rebinds the
/// saved role mappings over the result, and returns the wrapped predictor.
/// </summary>
internal override void PrepareData(IHostEnvironment env, IDataView input, out RoleMappedData roleMappedData, out IPredictor predictor)
{
    Contracts.CheckValue(env, nameof(env));
    env.CheckValue(input, nameof(input));

    var transformed = TransformModel.Apply(env, input);
    roleMappedData = new RoleMappedData(transformed, _roleMappings, opt: true);
    predictor = Predictor;
}