private protected GamTrainerBase(IHostEnvironment env, string name, SchemaShape.Column label, string featureColumnName, string weightCrowGroupColumnName, int numberOfIterations, double learningRate, int maximumBinCountPerFeature)
    : base(Contracts.CheckRef(env, nameof(env)).Register(name), TrainerUtils.MakeR4VecFeature(featureColumnName), label, TrainerUtils.MakeR4ScalarWeightColumn(weightCrowGroupColumnName))
{
    // Bundle the caller-supplied hyperparameters into a fresh options object.
    GamTrainerOptions = new TOptions
    {
        NumberOfIterations = numberOfIterations,
        LearningRate = learningRate,
        MaximumBinCountPerFeature = maximumBinCountPerFeature,
        LabelColumnName = label.Name,
        FeatureColumnName = featureColumnName,
    };
    if (weightCrowGroupColumnName != null)
    {
        GamTrainerOptions.ExampleWeightColumnName = weightCrowGroupColumnName;
    }

    Info = new TrainerInfo(normalization: false, calibration: NeedCalibration, caching: false, supportValid: true);

    // Translate the configured confidence level into squared standard deviations via the probit function.
    var tailProbability = (1 - GamTrainerOptions.GainConfidenceLevel) * 0.5;
    _gainConfidenceInSquaredStandardDeviations = Math.Pow(ProbabilityFunctions.Probit(1 - tailProbability), 2);
    _entropyCoefficient = GamTrainerOptions.EntropyCoefficient * 1e-6;
    InitializeThreads();
}
private protected GamTrainerBase(IHostEnvironment env, string name, SchemaShape.Column label, string featureColumn, string weightColumn, int numIterations, double learningRate, int maxBins)
    : base(Contracts.CheckRef(env, nameof(env)).Register(name), TrainerUtils.MakeR4VecFeature(featureColumn), label, TrainerUtils.MakeR4ScalarWeightColumn(weightColumn))
{
    // Collect the caller-supplied hyperparameters into a fresh argument object.
    Args = new TArgs
    {
        NumIterations = numIterations,
        LearningRates = learningRate,
        MaxBins = maxBins,
        LabelColumn = label.Name,
        FeatureColumn = featureColumn,
    };
    if (weightColumn != null)
    {
        Args.WeightColumn = weightColumn;
    }

    Info = new TrainerInfo(normalization: false, calibration: NeedCalibration, caching: false, supportValid: true);

    // Convert the configured confidence level into squared standard deviations via the probit function.
    double tail = (1 - Args.GainConfidenceLevel) * 0.5;
    _gainConfidenceInSquaredStandardDeviations = Math.Pow(ProbabilityFunctions.Probit(1 - tail), 2);
    _entropyCoefficient = Args.EntropyCoefficient * 1e-6;
    InitializeThreads();
}
internal MatrixFactorizationTrainer(IHostEnvironment env, Options options)
    : base(env, LoadNameValue)
{
    const string mustBePositive = "Parameter must be positive";
    Host.CheckValue(options, nameof(options));

    // All numeric hyperparameters must be strictly positive; the thread count may be
    // left null, meaning "use every processor".
    Host.CheckUserArg(options.K > 0, nameof(options.K), mustBePositive);
    Host.CheckUserArg(!options.NumThreads.HasValue || options.NumThreads > 0, nameof(options.NumThreads), mustBePositive);
    Host.CheckUserArg(options.NumIterations > 0, nameof(options.NumIterations), mustBePositive);
    Host.CheckUserArg(options.Lambda > 0, nameof(options.Lambda), mustBePositive);
    Host.CheckUserArg(options.Eta > 0, nameof(options.Eta), mustBePositive);
    Host.CheckUserArg(options.Alpha > 0, nameof(options.Alpha), mustBePositive);

    // Snapshot the validated options into the trainer's private state.
    _fun = (int)options.LossFunction;
    _lambda = options.Lambda;
    _k = options.K;
    _iter = options.NumIterations;
    _eta = options.Eta;
    _alpha = options.Alpha;
    _c = options.C;
    _threads = options.NumThreads ?? Environment.ProcessorCount;
    _quiet = options.Quiet;
    _doNmf = options.NonNegative;

    Info = new TrainerInfo(normalization: false, caching: false);

    LabelName = options.LabelColumnName;
    MatrixColumnIndexName = options.MatrixColumnIndexColumnName;
    MatrixRowIndexName = options.MatrixRowIndexColumnName;
}
private protected EnsembleTrainerBase(ArgumentsBase args, IHostEnvironment env, string name)
    : base(env, name)
{
    Args = args;
    using (var ch = Host.Start("Init"))
    {
        ch.CheckUserArg(Utils.Size(Args.BasePredictors) > 0, nameof(Args.BasePredictors), "This should have at-least one value");

        // A null NumModels means one model per base predictor, or the default count
        // when a single predictor was supplied.
        NumModels = Args.NumModels ?? (Args.BasePredictors.Length == 1 ? DefaultNumModels : Args.BasePredictors.Length);
        ch.CheckUserArg(NumModels > 0, nameof(Args.NumModels), "Must be positive, or null to indicate numModels is the number of base predictors");

        if (Utils.Size(Args.BasePredictors) > NumModels)
        {
            ch.Warning("The base predictor count is greater than models count. Some of the base predictors will be ignored.");
        }

        _subsetSelector = Args.SamplingType.CreateComponent(Host);

        // Cycle through the base predictors until every trainer slot is filled.
        Trainers = new ITrainer<IPredictorProducing<TOutput>>[NumModels];
        for (int modelIndex = 0; modelIndex < Trainers.Length; modelIndex++)
        {
            Trainers[modelIndex] = Args.BasePredictors[modelIndex % Args.BasePredictors.Length].CreateInstance(Host);
        }

        // We infer normalization and calibration preferences from the trainers. However, even if the internal trainers
        // don't need caching we are performing multiple passes over the data, so it is probably appropriate to always cache.
        Info = new TrainerInfo(
            normalization: Trainers.Any(t => t.Info.NeedNormalization),
            calibration: Trainers.Any(t => t.Info.NeedCalibration));
        ch.Done();
    }
}
public SymSgdClassificationTrainer(IHostEnvironment env, Arguments args)
    : base(env, LoadNameValue)
{
    // Let the argument object validate itself against the host, then capture it.
    args.Check(Host);
    _args = args;
    Info = new TrainerInfo();
}
protected TrainerBase(IHostEnvironment env, TrainerInfo trainerInfo, string featureColumn, string labelColumn)
{
    // Plain field capture; no validation is performed at this level.
    TrainerInfo = trainerInfo;
    _env = env;
    _featureCol = featureColumn;
    _labelCol = labelColumn;
}
/// <summary>
/// Initializes the <see cref="MetaMulticlassTrainer{TTransformer, TModel}"/> from the Arguments class.
/// </summary>
/// <param name="env">The private instance of the <see cref="IHostEnvironment"/>.</param>
/// <param name="args">The legacy arguments <see cref="ArgumentsBase"/> class.</param>
/// <param name="name">The component name.</param>
/// <param name="labelColumn">The label column for the metalinear trainer and the binary trainer.</param>
/// <param name="singleEstimator">The binary estimator.</param>
/// <param name="calibrator">The calibrator. If a calibrator is not explicitly provided, it will default to <see cref="PlattCalibratorTrainer"/>.</param>
internal MetaMulticlassTrainer(IHostEnvironment env, ArgumentsBase args, string name, string labelColumn = null, TScalarTrainer singleEstimator = null, ICalibratorTrainer calibrator = null)
{
    Host = Contracts.CheckRef(env, nameof(env)).Register(name);
    Host.CheckValue(args, nameof(args));
    Args = args;
    if (labelColumn != null)
    {
        // The label is surfaced as a scalar key (U4) column.
        LabelColumn = new SchemaShape.Column(labelColumn, SchemaShape.Column.VectorKind.Scalar, NumberType.U4, true);
    }
    // Create the first trainer so errors in the args surface early.
    _trainer = singleEstimator ?? CreateTrainer();
    // An explicitly supplied calibrator wins; otherwise the one from args, then the Platt default.
    Calibrator = calibrator ?? new PlattCalibratorTrainer(env);
    if (args.Calibrator != null)
    {
        Calibrator = args.Calibrator.CreateComponent(Host);
    }
    // Regarding caching, no matter what the internal predictor, we're performing many passes
    // simply by virtue of this being a meta-trainer, so we will still cache.
    Info = new TrainerInfo(normalization: _trainer.Info.NeedNormalization);
}
/// <summary>
/// Legacy constructor building a <see cref="MatrixFactorizationTrainer"/> from the legacy
/// <see cref="Arguments"/> class.
/// </summary>
/// <param name="env">The private instance of <see cref="IHostEnvironment"/>.</param>
/// <param name="args">An instance of the legacy <see cref="Arguments"/> to apply advanced parameters to the algorithm.</param>
public MatrixFactorizationTrainer(IHostEnvironment env, Arguments args)
    : base(env, LoadNameValue)
{
    const string mustBePositive = "Parameter must be positive";
    Host.CheckValue(args, nameof(args));

    // Every numeric hyperparameter must be strictly positive; the thread count may be
    // left null, meaning "use every processor".
    Host.CheckUserArg(args.K > 0, nameof(args.K), mustBePositive);
    Host.CheckUserArg(!args.NumThreads.HasValue || args.NumThreads > 0, nameof(args.NumThreads), mustBePositive);
    Host.CheckUserArg(args.NumIterations > 0, nameof(args.NumIterations), mustBePositive);
    Host.CheckUserArg(args.Lambda > 0, nameof(args.Lambda), mustBePositive);
    Host.CheckUserArg(args.Eta > 0, nameof(args.Eta), mustBePositive);
    Host.CheckUserArg(args.Alpha > 0, nameof(args.Alpha), mustBePositive);

    // Snapshot the validated arguments into the trainer's private state.
    _fun = (int)args.LossFunction;
    _lambda = args.Lambda;
    _k = args.K;
    _iter = args.NumIterations;
    _eta = args.Eta;
    _alpha = args.Alpha;
    _c = args.C;
    _threads = args.NumThreads ?? Environment.ProcessorCount;
    _quiet = args.Quiet;
    _doNmf = args.NonNegative;

    Info = new TrainerInfo(normalization: false, caching: false);
}
/// <summary>
/// Builds a <see cref="MatrixFactorizationTrainer"/> with default arguments, optionally
/// tweaked by the supplied delegate.
/// </summary>
/// <param name="env">The private instance of <see cref="IHostEnvironment"/>.</param>
/// <param name="matrixColumnIndexColumnName">The name of the column hosting the matrix's column IDs.</param>
/// <param name="matrixRowIndexColumnName">The name of the column hosting the matrix's row IDs.</param>
/// <param name="labelColumn">The name of the label column.</param>
/// <param name="advancedSettings">A delegate to apply all the advanced arguments to the algorithm.</param>
public MatrixFactorizationTrainer(IHostEnvironment env, string matrixColumnIndexColumnName, string matrixRowIndexColumnName, string labelColumn = DefaultColumnNames.Label, Action<Arguments> advancedSettings = null)
    : base(env, LoadNameValue)
{
    // Start from defaults and let the caller's delegate override them.
    var advancedArgs = new Arguments();
    advancedSettings?.Invoke(advancedArgs);

    _fun = (int)advancedArgs.LossFunction;
    _lambda = advancedArgs.Lambda;
    _k = advancedArgs.K;
    _iter = advancedArgs.NumIterations;
    _eta = advancedArgs.Eta;
    _alpha = advancedArgs.Alpha;
    _c = advancedArgs.C;
    _threads = advancedArgs.NumThreads ?? Environment.ProcessorCount;
    _quiet = advancedArgs.Quiet;
    _doNmf = advancedArgs.NonNegative;

    Info = new TrainerInfo(normalization: false, caching: false);

    LabelName = labelColumn;
    MatrixColumnIndexName = matrixColumnIndexColumnName;
    MatrixRowIndexName = matrixRowIndexColumnName;
}
/// <summary>
/// Builds a <see cref="FieldAwareFactorizationMachineTrainer"/> with default arguments,
/// optionally tweaked by the supplied delegate.
/// </summary>
/// <param name="env">The private instance of <see cref="IHostEnvironment"/>.</param>
/// <param name="featureColumns">The name of column hosting the features.</param>
/// <param name="labelColumn">The name of the label column.</param>
/// <param name="advancedSettings">A delegate to apply all the advanced arguments to the algorithm.</param>
/// <param name="weights">The name of the optional weights' column.</param>
/// <param name="context">The <see cref="TrainerEstimatorContext"/> for additional input data to training.</param>
public FieldAwareFactorizationMachineTrainer(IHostEnvironment env, string[] featureColumns, string labelColumn = DefaultColumnNames.Label, string weights = null, TrainerEstimatorContext context = null, Action<Arguments> advancedSettings = null)
    : base(env, LoadName)
{
    // Build the argument object, letting the optional delegate override defaults.
    var ffmArgs = new Arguments();
    advancedSettings?.Invoke(ffmArgs);
    Initialize(env, ffmArgs);

    Info = new TrainerInfo(supportValid: true, supportIncrementalTrain: true);
    Context = context;

    // One vector-valued R4 column per feature group.
    var columns = new SchemaShape.Column[featureColumns.Length];
    for (int idx = 0; idx < columns.Length; idx++)
    {
        columns[idx] = new SchemaShape.Column(featureColumns[idx], SchemaShape.Column.VectorKind.Vector, NumberType.R4, false);
    }
    FeatureColumns = columns;

    LabelColumn = new SchemaShape.Column(labelColumn, SchemaShape.Column.VectorKind.Scalar, BoolType.Instance, false);
    WeightColumn = weights == null ? null : new SchemaShape.Column(weights, SchemaShape.Column.VectorKind.Scalar, NumberType.R4, false);
}
// Returns the trainer-view payload for the requested trainer, with start/end trace logging.
public IHttpActionResult TrainerViewData(TrainerInfo objTrainerInfo)
{
    StringBuilder traceLog = null;
    ServiceResponse<TrainerViewVM> objResponce = null;
    try
    {
        traceLog = new StringBuilder();
        traceLog.AppendLine("Start: GetTrainerViewData() Request TrainerID-" + objTrainerInfo.TrainerId);
        objResponce = new ServiceResponse<TrainerViewVM>();
        objResponce.IsResultTrue = true;
        objResponce.jsonData = HomeRequestBL.GetTrainerViewData(objTrainerInfo.TrainerId, objTrainerInfo.NotificationID);
        return Ok(objResponce);
    }
    catch (Exception ex)
    {
        LogManager.LogManagerInstance.WriteErrorLog(ex);
        return BadRequest(ex.Message);
    }
    finally
    {
        // BUG FIX: traceLog/objResponce can still be null here if the try block faulted
        // before they were assigned (e.g. a null objTrainerInfo). Dereferencing them
        // unguarded would throw from the finally block and mask the original exception.
        if (traceLog != null)
        {
            traceLog.AppendLine("End:GetTrainerViewData() Reponse Data:-Result Status-" + (objResponce?.IsResultTrue.ToString() ?? "unknown") + ",Fetch DateTime-" + DateTime.Now.ToLongDateString());
            LogManager.LogManagerInstance.WriteTraceLog(traceLog);
        }
        traceLog = null;
    }
}
// Returns the list of videos for the requested trainer, with start/end trace logging.
public IHttpActionResult GetVideos(TrainerInfo objTrainerInfo)
{
    StringBuilder traceLog = null;
    ServiceResponse<List<VideoInfo>> objResponce = null;
    try
    {
        traceLog = new StringBuilder();
        traceLog.AppendLine("Start: GetVideos() Request Data:-TrainerId-" + objTrainerInfo.TrainerId);
        objResponce = new ServiceResponse<List<VideoInfo>>();
        objResponce.IsResultTrue = true;
        objResponce.jsonData = HomeRequestBL.GetVideos(objTrainerInfo.TrainerId);
        return Ok(objResponce);
    }
    catch (Exception ex)
    {
        LogManager.LogManagerInstance.WriteErrorLog(ex);
        return BadRequest(ex.Message);
    }
    finally
    {
        // BUG FIX: traceLog/objResponce can still be null here if the try block faulted
        // before they were assigned (e.g. a null objTrainerInfo). Dereferencing them
        // unguarded would throw from the finally block and mask the original exception.
        if (traceLog != null)
        {
            traceLog.AppendLine("End:GetVideos() Response Result Ststus-" + (objResponce?.IsResultTrue.ToString() ?? "unknown") + ",Fetched DateTime-" + DateTime.Now.ToLongDateString());
            LogManager.LogManagerInstance.WriteTraceLog(traceLog);
        }
        traceLog = null;
    }
}
// Updates an existing TrainerInfo record with the posted values, then redirects by role.
public ActionResult Edit(TrainerInfo trainerInfo)
{
    if (!ModelState.IsValid)
    {
        return(View("~/Views/ErrorValidations/Null.cshtml"));
    }
    // Load the tracked entity so EF change-tracking picks up the field updates on SaveChanges.
    var trainerInfoInDb = _context.TrainerInfos.SingleOrDefault(c => c.TrainerInfoId == trainerInfo.TrainerInfoId);
    if (trainerInfoInDb == null)
    {
        return(HttpNotFound());
    }
    trainerInfoInDb.TrainerId = trainerInfo.TrainerId;
    trainerInfoInDb.Full_Name = trainerInfo.Full_Name;
    trainerInfoInDb.Email = trainerInfo.Email;
    trainerInfoInDb.Working_Place = trainerInfo.Working_Place;
    // NOTE(review): Phone is copied in Create() but not here — confirm whether editing
    // is intentionally not allowed to change the phone number.
    _context.SaveChanges();
    // Training Staff land on the full list; Trainers land on their own records.
    if (User.IsInRole("Training Staff"))
    {
        return(RedirectToAction("Index"));
    }
    if (User.IsInRole("Trainer"))
    {
        return(RedirectToAction("Mine"));
    }
    return(RedirectToAction("Index"));
}
internal MatrixFactorizationTrainer(IHostEnvironment env, string labelColumnName, string matrixColumnIndexColumnName, string matrixRowIndexColumnName, int approximationRank = Defaults.ApproximationRank, double learningRate = Defaults.LearningRate, int numIterations = Defaults.NumIterations)
{
    Contracts.CheckValue(env, nameof(env));
    _host = env.Register(LoadNameValue);

    // A default Options instance supplies everything the explicit parameters don't cover.
    var defaults = new Options();
    _fun = (int)defaults.LossFunction;
    _k = approximationRank;
    _iter = numIterations;
    _eta = learningRate;
    _alpha = defaults.Alpha;
    _lambda = defaults.Lambda;
    _c = defaults.C;
    _threads = defaults.NumberOfThreads ?? Environment.ProcessorCount;
    _quiet = defaults.Quiet;
    _doNmf = defaults.NonNegative;

    _info = new TrainerInfo(normalization: false, caching: false);

    LabelName = labelColumnName;
    MatrixColumnIndexName = matrixColumnIndexColumnName;
    MatrixRowIndexName = matrixRowIndexColumnName;
}
internal MatrixFactorizationTrainer(IHostEnvironment env, Options options)
{
    Contracts.CheckValue(env, nameof(env));
    _host = env.Register(LoadNameValue);

    const string mustBePositive = "Parameter must be positive";
    _host.CheckValue(options, nameof(options));

    // All numeric hyperparameters must be strictly positive; the thread count may be
    // left null, meaning "use every processor".
    _host.CheckUserArg(options.ApproximationRank > 0, nameof(options.ApproximationRank), mustBePositive);
    _host.CheckUserArg(!options.NumberOfThreads.HasValue || options.NumberOfThreads > 0, nameof(options.NumberOfThreads), mustBePositive);
    _host.CheckUserArg(options.NumberOfIterations > 0, nameof(options.NumberOfIterations), mustBePositive);
    _host.CheckUserArg(options.Lambda > 0, nameof(options.Lambda), mustBePositive);
    _host.CheckUserArg(options.LearningRate > 0, nameof(options.LearningRate), mustBePositive);
    _host.CheckUserArg(options.Alpha > 0, nameof(options.Alpha), mustBePositive);

    // Snapshot the validated options into the trainer's private state.
    _fun = (int)options.LossFunction;
    _lambda = options.Lambda;
    _k = options.ApproximationRank;
    _iter = options.NumberOfIterations;
    _eta = options.LearningRate;
    _alpha = options.Alpha;
    _c = options.C;
    _threads = options.NumberOfThreads ?? Environment.ProcessorCount;
    _quiet = options.Quiet;
    _doNmf = options.NonNegative;

    _info = new TrainerInfo(normalization: false, caching: false);

    LabelName = options.LabelColumnName;
    MatrixColumnIndexName = options.MatrixColumnIndexColumnName;
    MatrixRowIndexName = options.MatrixRowIndexColumnName;
}
/// <summary>
/// Initializes a new instance of <see cref="SymSgdClassificationTrainer"/> from a legacy
/// <see cref="Arguments"/> object, wiring up the feature and label columns it names.
/// </summary>
internal SymSgdClassificationTrainer(IHostEnvironment env, Arguments args)
    : base(Contracts.CheckRef(env, nameof(env)).Register(LoadNameValue), TrainerUtils.MakeR4VecFeature(args.FeatureColumn), TrainerUtils.MakeBoolScalarLabel(args.LabelColumn))
{
    // Let the argument object validate itself against the host, then capture it.
    args.Check(Host);
    _args = args;
    Info = new TrainerInfo();
}
// Permanently removes the TrainerInfo row with the given id, then returns to the list.
public ActionResult DeleteConfirmed(int id)
{
    TrainerInfo trainerInfo = _context.TrainerInfos.Find(id);
    // BUG FIX: Find returns null when no row matches (stale id, double-submit), and
    // DbSet.Remove(null) throws ArgumentNullException. Respond with 404 instead,
    // consistent with the Details action.
    if (trainerInfo == null)
    {
        return(HttpNotFound());
    }
    _context.TrainerInfos.Remove(trainerInfo);
    _context.SaveChanges();
    return(RedirectToAction("Index"));
}
/// <summary>
/// Initializes a new instance of <see cref="SymSgdClassificationTrainer"/> from an
/// <see cref="Options"/> object, wiring up the feature and label columns it names.
/// </summary>
internal SymSgdClassificationTrainer(IHostEnvironment env, Options options)
    : base(Contracts.CheckRef(env, nameof(env)).Register(LoadNameValue), TrainerUtils.MakeR4VecFeature(options.FeatureColumn), TrainerUtils.MakeBoolScalarLabel(options.LabelColumn))
{
    // Null-check first, then let the options object run its own validation.
    Host.CheckValue(options, nameof(options));
    options.Check(Host);
    _options = options;
    Info = new TrainerInfo(supportIncrementalTrain: true);
}
internal MetaMulticlassTrainer(IHostEnvironment env, TArgs args, string name)
    : base(env, name)
{
    Host.CheckValue(args, nameof(args));
    Args = args;

    // Build the inner trainer up front so bad arguments fail fast.
    _trainer = CreateTrainer();

    // Regarding caching, no matter what the internal predictor, we're performing many passes
    // simply by virtue of this being a meta-trainer, so we will still cache.
    Info = new TrainerInfo(normalization: _trainer.Info.NeedNormalization);
}
protected OnlineLinearTrainer(TArguments args, IHostEnvironment env, string name)
    : base(env, name)
{
    Contracts.CheckValue(args, nameof(args));
    // Iterations and cache size must be positive; the initial-weights diameter may be zero.
    Contracts.CheckUserArg(args.NumIterations > 0, nameof(args.NumIterations), UserErrorPositive);
    Contracts.CheckUserArg(args.InitWtsDiameter >= 0, nameof(args.InitWtsDiameter), UserErrorNonNegative);
    Contracts.CheckUserArg(args.StreamingCacheSize > 0, nameof(args.StreamingCacheSize), UserErrorPositive);
    Args = args;

    // REVIEW: Caching could be false for one iteration, if we got around the whole shuffling issue.
    Info = new TrainerInfo(calibration: NeedCalibration, supportIncrementalTrain: true);
}
protected OnlineLinearTrainer(OnlineLinearArguments args, IHostEnvironment env, string name, SchemaShape.Column label)
    : base(Contracts.CheckRef(env, nameof(env)).Register(name), TrainerUtils.MakeR4VecFeature(args.FeatureColumn), label, TrainerUtils.MakeR4ScalarWeightColumn(args.InitialWeights))
{
    Contracts.CheckValue(args, nameof(args));
    // Iterations and cache size must be positive; the initial-weights diameter may be zero.
    Contracts.CheckUserArg(args.NumIterations > 0, nameof(args.NumIterations), UserErrorPositive);
    Contracts.CheckUserArg(args.InitWtsDiameter >= 0, nameof(args.InitWtsDiameter), UserErrorNonNegative);
    Contracts.CheckUserArg(args.StreamingCacheSize > 0, nameof(args.StreamingCacheSize), UserErrorPositive);
    Args = args;
    Name = name;

    // REVIEW: Caching could be false for one iteration, if we got around the whole shuffling issue.
    Info = new TrainerInfo(calibration: NeedCalibration, supportIncrementalTrain: true);
}
// Start is called before the first frame update.
// Spawns one TrainerInfo widget per trainer under the layout and tints each HP bar
// with the next color from trainerColor.
void Start()
{
    int colorIndex = 0;
    foreach (Trainer trainer in gm.trainers)
    {
        var prefab = Resources.Load("Prefabs/TrainerInfo") as GameObject;
        TrainerInfo info = Instantiate(prefab, layout.transform).GetComponent<TrainerInfo>();
        info.SetTrainer(trainer);
        info.SetHpColor(trainerColor[colorIndex]);
        trainerInfos.Add(info);
        colorIndex++;
    }
}
// Appends a Features-normalization suggestion, but only when the trainer declares it needs one.
private static void AddNormalizationTransforms(MLContext context, TrainerInfo trainerInfo, ICollection<SuggestedTransform> transforms)
{
    if (trainerInfo.NeedNormalization)
    {
        transforms.Add(NormalizingExtension.CreateSuggestedTransform(context, DefaultColumnNames.Features, DefaultColumnNames.Features));
    }
}
/// <summary>
/// Initializes a new instance of <see cref="SymSgdClassificationTrainer"/>
/// </summary>
/// <param name="env">The private instance of <see cref="IHostEnvironment"/>.</param>
/// <param name="labelColumn">The name of the label column.</param>
/// <param name="featureColumn">The name of the feature column.</param>
/// <param name="advancedSettings">A delegate to apply all the advanced arguments to the algorithm.</param>
public SymSgdClassificationTrainer(IHostEnvironment env, string featureColumn, string labelColumn, Action<Arguments> advancedSettings = null)
    : base(Contracts.CheckRef(env, nameof(env)).Register(LoadNameValue), TrainerUtils.MakeR4VecFeature(featureColumn), TrainerUtils.MakeBoolScalarLabel(labelColumn))
{
    _args = new Arguments();

    // Apply the advanced args, if the user supplied any.
    advancedSettings?.Invoke(_args);
    _args.FeatureColumn = featureColumn;
    _args.LabelColumn = labelColumn;

    // BUG FIX: validate only AFTER the advanced settings and column names have been
    // applied. The original called Check on the pristine defaults before the delegate
    // ran, so user-supplied values were never validated (compare the other ctors,
    // which check the fully-formed argument object).
    _args.Check(Host);

    Info = new TrainerInfo();
}
// Shows the details page for one TrainerInfo row; 400 when no id, 404 when not found.
public ActionResult Details(int? id)
{
    // A missing id is a malformed request, not a missing record.
    if (id == null)
    {
        return(new HttpStatusCodeResult(HttpStatusCode.BadRequest));
    }
    var record = _context.TrainerInfos.Find(id);
    return record == null ? (ActionResult)HttpNotFound() : View(record);
}
// GET form for creating a trainer: populates the trainer picker with users in the "Trainer" role.
public ActionResult Create()
{
    // Locate the role whose name contains "Trainer"; its members populate the picker.
    var role = _context.Roles
        .Where(r => r.Name.Contains("Trainer"))
        .FirstOrDefault();

    var users = _context.Users
        .Where(x => x.Roles.Select(y => y.RoleId).Contains(role.Id))
        .ToList();

    // FIX: dropped the unused `topics` query (_context.Topics.ToList()) — its result
    // was never passed to the view, so it only cost a database round-trip.

    var newTrainer = new TrainerInfo
    {
        Trainers = users,
    };
    return(View(newTrainer));
}
public FieldAwareFactorizationMachineTrainer(IHostEnvironment env, Arguments args)
    : base(env, LoadName)
{
    // Hyperparameter sanity checks: dimension/rate strictly positive, regularizers and
    // iteration count non-negative.
    Host.CheckUserArg(args.LatentDim > 0, nameof(args.LatentDim), "Must be positive");
    Host.CheckUserArg(args.LambdaLinear >= 0, nameof(args.LambdaLinear), "Must be non-negative");
    Host.CheckUserArg(args.LambdaLatent >= 0, nameof(args.LambdaLatent), "Must be non-negative");
    Host.CheckUserArg(args.LearningRate > 0, nameof(args.LearningRate), "Must be positive");
    Host.CheckUserArg(args.Iters >= 0, nameof(args.Iters), "Must be non-negative");

    _latentDim = args.LatentDim;
    // Also cache the aligned latent length computed by the utils helper.
    _latentDimAligned = FieldAwareFactorizationMachineUtils.GetAlignedVectorLength(_latentDim);
    _lambdaLinear = args.LambdaLinear;
    _lambdaLatent = args.LambdaLatent;
    _learningRate = args.LearningRate;
    _numIterations = args.Iters;
    _norm = args.Norm;
    _shuffle = args.Shuffle;
    _verbose = args.Verbose;
    _radius = args.Radius;
    Info = new TrainerInfo();
}
// POST handler: persists a new TrainerInfo row copied from the posted model.
public ActionResult Create(TrainerInfo trainerInfo)
{
    if (!ModelState.IsValid)
    {
        return(View("~/Views/ErrorValidations/Null.cshtml"));
    }
    // Copy only the persisted fields into a fresh entity rather than attaching the posted model.
    var record = new TrainerInfo
    {
        TrainerId = trainerInfo.TrainerId,
        Full_Name = trainerInfo.Full_Name,
        Email = trainerInfo.Email,
        Working_Place = trainerInfo.Working_Place,
        Phone = trainerInfo.Phone
    };
    _context.TrainerInfos.Add(record);
    _context.SaveChanges();
    return(RedirectToAction("Index"));
}
/// <summary>
/// Sets up a <see cref="MetaMulticlassClassificationTrainer{TTransformer, TModel}"/> from an
/// <see cref="OptionsBase"/> instance.
/// </summary>
/// <param name="env">The host environment.</param>
/// <param name="options">The <see cref="OptionsBase"/> carrying the trainer settings.</param>
/// <param name="name">The component name to register with the host.</param>
/// <param name="labelColumn">The label column shared by the meta trainer and the binary trainer.</param>
/// <param name="singleEstimator">The binary estimator; created from the options when null.</param>
/// <param name="calibrator">The calibrator; defaults to <see cref="PlattCalibratorTrainer"/> when null.</param>
internal MetaMulticlassClassificationTrainer(IHostEnvironment env, OptionsBase options, string name, string labelColumn = null, TScalarTrainer singleEstimator = null, ICalibratorTrainer calibrator = null)
{
    Host = Contracts.CheckRef(env, nameof(env)).Register(name);
    Host.CheckValue(options, nameof(options));
    Args = options;

    if (labelColumn != null)
        LabelColumn = new SchemaShape.Column(labelColumn, SchemaShape.Column.VectorKind.Scalar, NumberDataViewType.UInt32, true);

    // Build the inner trainer up front so bad options fail fast.
    Trainer = singleEstimator ?? CreateTrainer();

    // An explicitly supplied calibrator wins; otherwise the one from options, then the Platt default.
    Calibrator = calibrator ?? new PlattCalibratorTrainer(env);
    if (options.Calibrator != null)
        Calibrator = options.Calibrator.CreateComponent(Host);

    // Regarding caching, no matter what the internal predictor, we're performing many passes
    // simply by virtue of this being a meta-trainer, so we will still cache.
    Info = new TrainerInfo(normalization: Trainer.Info.NeedNormalization);
}
internal FieldAwareFactorizationMachineTrainer(IHostEnvironment env, string[] featureColumns, string labelColumn = DefaultColumnNames.Label, string weights = null)
    : base(env, LoadName)
{
    // Default options; Initialize copies them into the trainer's state.
    Initialize(env, new Options());
    Info = new TrainerInfo(supportValid: true, supportIncrementalTrain: true);

    // One vector-valued R4 column per feature group.
    var columns = new SchemaShape.Column[featureColumns.Length];
    for (int idx = 0; idx < columns.Length; idx++)
    {
        columns[idx] = new SchemaShape.Column(featureColumns[idx], SchemaShape.Column.VectorKind.Vector, NumberType.R4, false);
    }
    FeatureColumns = columns;

    LabelColumn = new SchemaShape.Column(labelColumn, SchemaShape.Column.VectorKind.Scalar, BoolType.Instance, false);
    WeightColumn = weights == null ? default : new SchemaShape.Column(weights, SchemaShape.Column.VectorKind.Scalar, NumberType.R4, false);
}