private protected override void CheckAndUpdateParametersBeforeTraining(IChannel ch, RoleMappedData data, float[] labels, int[] groups)
{
    // Train with LightGBM's plain regression objective.
    Options["objective"] = "regression";

    // Add default metric: L2 (mean squared error).
    if (!Options.ContainsKey("metric"))
    {
        Options["metric"] = "l2";
    }
}
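For context, this override fills in LightGBM parameters when the regression trainer is built through ML.NET's trainer catalog. A minimal usage sketch, assuming the Microsoft.ML and Microsoft.ML.LightGbm packages and placeholder column names ("Label", "Features"):

using Microsoft.ML;

// Minimal sketch: column names and hyperparameter values are illustrative assumptions.
var mlContext = new MLContext();

// The resulting trainer runs with objective=regression and, unless the caller
// supplies a metric, defaults to l2, as set by the override above.
var trainer = mlContext.Regression.Trainers.LightGbm(
    labelColumnName: "Label",
    featureColumnName: "Features",
    numberOfLeaves: 31,
    numberOfIterations: 100);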
private protected override void CheckAndUpdateParametersBeforeTraining(IChannel ch, RoleMappedData data, float[] labels, int[] groups)
{
    Host.AssertValue(ch);

    // Ranking uses LightGBM's LambdaRank objective and requires group information.
    Options["objective"] = "lambdarank";
    ch.CheckValue(groups, nameof(groups));

    // Add default metric: NDCG.
    if (!Options.ContainsKey("metric"))
    {
        Options["metric"] = "ndcg";
    }

    // Only output one NDCG score, evaluated at position 5.
    Options["eval_at"] = "5";
}
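For the ranking override, the trainer also needs a row-group column so LambdaRank can form query groups. A minimal sketch, again with assumed column names ("Label", "Features", "GroupId"):

using Microsoft.ML;

// Minimal sketch: column names are illustrative assumptions.
var mlContext = new MLContext();

// The override above sets objective=lambdarank, defaults the metric to ndcg,
// and reports a single NDCG@5 score during training.
var trainer = mlContext.Ranking.Trainers.LightGbm(
    labelColumnName: "Label",
    featureColumnName: "Features",
    rowGroupColumnName: "GroupId");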
private protected override void CheckAndUpdateParametersBeforeTraining(IChannel ch, RoleMappedData data, float[] labels, int[] groups)
{
    Host.AssertValue(ch);
    ch.Assert(PredictionKind == PredictionKind.MulticlassClassification);
    ch.Assert(_numClass > 1);

    // Tell LightGBM how many classes to predict.
    Options["num_class"] = _numClass;
    bool useSoftmax = false;

    if (LightGbmTrainerOptions.UseSoftmax.HasValue)
    {
        // The caller chose explicitly between softmax and one-versus-all.
        useSoftmax = LightGbmTrainerOptions.UseSoftmax.Value;
    }
    else
    {
        // Auto-tune: prefer softmax only when there is enough training data.
        if (labels.Length >= _minDataToUseSoftmax)
        {
            useSoftmax = true;
        }

        ch.Info("Auto-tuning parameters: " + nameof(LightGbmTrainerOptions.UseSoftmax) + " = " + useSoftmax);
    }

    if (useSoftmax)
    {
        Options["objective"] = "multiclass";
    }
    else
    {
        Options["objective"] = "multiclassova";
    }

    // Add default metric: multiclass classification error rate.
    if (!Options.ContainsKey("metric"))
    {
        Options["metric"] = "multi_error";
    }
}
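For the multiclass override, setting UseSoftmax explicitly skips the auto-tuning branch and picks objective=multiclass (or multiclassova when false); leaving it unset lets the trainer decide from the number of training rows. A minimal sketch using the options object, with assumed column names:

using Microsoft.ML;
using Microsoft.ML.Trainers.LightGbm;

// Minimal sketch: column names are illustrative assumptions.
var mlContext = new MLContext();

var options = new LightGbmMulticlassTrainer.Options
{
    LabelColumnName = "Label",
    FeatureColumnName = "Features",
    // Explicit choice: use the softmax objective instead of one-versus-all.
    UseSoftmax = true
};

var trainer = mlContext.MulticlassClassification.Trainers.LightGbm(options);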