Code example #1
        public static void RocFromSortedMatches(List <PeakBoundsMatch> matches, NormalizeType normalizer, out PointPairList rocPoints)
        {
            double scalingFactor = GetScalingFactor(normalizer, matches);

            rocPoints = new PointPairList();
            int truePositives  = 0;
            int falsePositives = 0;

            foreach (var match in matches)
            {
                if (match.IsFalsePositive)
                {
                    falsePositives++;
                }
                if (match.IsMatch)
                {
                    truePositives++;
                }
                // Null qValues get the worst score
                double qValue   = match.QValue ?? 1.0;
                var    rocPoint = new PointPair(falsePositives / (double)(truePositives + falsePositives), truePositives / scalingFactor)
                {
                    Tag = qValue
                };
                rocPoints.Add(rocPoint);
            }
        }
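
A note on the arithmetic above: the x coordinate is the running false discovery rate, FP / (TP + FP), and the y coordinate is the true-positive count divided by the scaling factor from GetScalingFactor (example #8). Below is a minimal standalone sketch of the same point construction using plain booleans instead of Skyline's PeakBoundsMatch type; all names here are illustrative, not part of the original code.

    using System;
    using System.Collections.Generic;

    static class RocSketch
    {
        // matches must already be sorted from best to worst score, as in the caller.
        public static List<(double X, double Y)> Points(
            IEnumerable<(bool IsFalsePositive, bool IsTruePositive)> matches,
            double scalingFactor)
        {
            var points = new List<(double, double)>();
            int tp = 0, fp = 0;
            foreach (var (isFp, isTp) in matches)
            {
                if (isFp) fp++;
                if (isTp) tp++;
                // x: observed false discovery rate; y: scaled true-positive count.
                // Note: if the leading matches are neither TP nor FP, tp + fp is 0
                // and x evaluates to NaN, exactly as in the method above.
                points.Add((fp / (double)(tp + fp), tp / scalingFactor));
            }
            return points;
        }
    }
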
Code example #2
        protected override IParsedElement TryParseElement(List <string> lines, int index, out int end)
        {
            end = index + 1;
            var text = lines[index].Trim();

            while (text.EndsWith(","))
            {
                text = $"{text} {lines[end++].Trim()}";
            }

            var test = NormalizeType.Replace(text, string.Empty);

            if (!FuncRegex.IsMatch(test) || VoidFuncRegex.IsMatch(test))
            {
                return(null);
            }

            var cppTypeMatch = FuncTypeRegex.Match(test);

            if (!cppTypeMatch.Success)
            {
                return(null);
            }

            var nameMatch = FuncNameRegex.Match(test);

            if (!nameMatch.Success)
            {
                return(null);
            }

            var paramsMatch = ParamsRegex.Match(test);

            if (!paramsMatch.Success)
            {
                return(null);
            }

            var netType = cppTypeMatch.Value;

            netType = GetKnownTypeOrDefault(netType);

            var field = new ParsedFunc
            {
                Type      = netType,
                CppType   = cppTypeMatch.Value,
                Name      = ToTitleCase(nameMatch.Value),
                CppName   = nameMatch.Value,
                Params    = TryParseParams(paramsMatch.Value),
                CppParams = paramsMatch.Value,
                //Comment = coment
            };

            return(field);
        }
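
The while loop at the top is the subtle part: a declaration ending in a comma is assumed to continue on the next line, so lines are joined until the statement is complete and end reports the first unconsumed index. A self-contained sketch of just that join (the regex matching below it depends on the class's private fields, so it is omitted here):

    using System;
    using System.Collections.Generic;

    static class ContinuationSketch
    {
        public static string JoinContinuations(List<string> lines, int index, out int end)
        {
            end = index + 1;
            var text = lines[index].Trim();
            // A trailing comma means the declaration continues on the next line.
            // (As in the original, a file that ends mid-declaration would read
            // past the end of the list.)
            while (text.EndsWith(","))
            {
                text = $"{text} {lines[end++].Trim()}";
            }
            return text;
        }
    }

    // JoinContinuations(new List<string> { "int Foo(int a,", "    int b);" }, 0, out var end)
    // returns "int Foo(int a, int b);" with end == 2.
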
Code example #3
        public void UpdateYAxis(ComboBox comboBox)
        {
            string selectedItem = (string)comboBox.SelectedItem;

            zedGraphRoc.GraphPane.YAxis.Title.Text   = selectedItem;
            zedGraphFiles.GraphPane.YAxis.Title.Text = selectedItem;
            Normalizer = selectedItem == Resources.ComparePeakPickingDlg_ComparePeakPickingDlg_Total_Correct_Peaks
                ? NormalizeType.total
                : selectedItem == Resources.ComparePeakPickingDlg_ComparePeakPickingDlg_Fraction_of_Manual_ID_s
                    ? NormalizeType.frac_manual
                    : NormalizeType.frac_all;
            comboBoxYAxis.SelectedItem      = selectedItem;
            comboBoxFilesYAxis.SelectedItem = selectedItem;
        }
Code example #4
        public static void MakeRocLists(ComparePeakBoundaries comparer, NormalizeType normalizer, out PointPairList rocPoints)
        {
            var matches = comparer.Matches;

            if (comparer.HasNoScores)
            {
                matches.Sort(PeakBoundsMatch.CompareQValue);
            }
            else
            {
                matches.Sort(PeakBoundsMatch.CompareScore);
            }
            RocFromSortedMatches(matches, normalizer, out rocPoints);
        }
Code example #5
        /// <summary>
        /// Convert normalize type to the xgboost parameter string.
        /// </summary>
        /// <param name="type">The normalize type to convert.</param>
        /// <returns>The corresponding xgboost parameter string.</returns>
        public static string ToXGBoostString(this NormalizeType type)
        {
            switch (type)
            {
            case NormalizeType.Tree:
                return("tree");

            case NormalizeType.Forest:
                return("forest");

            default:
                throw new ArgumentException("Unknown normalize type: " + type);
            }
        }
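
A hypothetical call site, assuming the two-member enum this extension is written against:

    // enum NormalizeType { Tree, Forest }  (assumed shape)
    string dartNormalize = NormalizeType.Forest.ToXGBoostString(); // "forest"
    // Any value outside Tree/Forest throws
    // ArgumentException("Unknown normalize type: ...").
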
Code example #6
File: Angle.cs Project: RoqueDeicide/CryCIL
 /// <summary>
 /// Converts radians to degrees.
 /// </summary>
 /// <param name="radians"> Angle in radians. </param>
 /// <param name="norm">
 /// Indicates whether the angle should be normalized, and if so, how.
 /// </param>
 /// <returns> Angle in degrees. </returns>
 public static float Degree(float radians, NormalizeType norm)
 {
     float degrees = (float)(radians * (180 / Math.PI));
     switch (norm)
     {
         case NormalizeType.Normalize180:
             while (degrees <= -180) degrees += 360;
             while (degrees > 180) degrees -= 360;
             break;
         case NormalizeType.Normalize360:
             while (degrees <= 0) degrees += 360;
             while (degrees > 360) degrees -= 360;
             break;
     }
     return degrees;
 }
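
Worked examples of the two normalization modes (the class name Angle is taken from the file header; the loops map results into (-180, 180] and (0, 360] respectively):

    float a = Angle.Degree((float)(3 * Math.PI), NormalizeType.Normalize180);
    // 3 pi rad = 540 deg; 540 > 180, so 540 - 360 = 180 -> a == 180 (interval (-180, 180]).

    float b = Angle.Degree((float)(-Math.PI), NormalizeType.Normalize360);
    // -pi rad = -180 deg; -180 <= 0, so -180 + 360 = 180 -> b == 180 (interval (0, 360]).
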
Code example #7
File: StringUtility.cs Project: Fsamot/HtmlUi
        /// <summary>
        /// Normalizes the specified value.
        /// </summary>
        /// <param name="value">The value.</param>
        /// <param name="normalizeType">The normalize type.</param>
        /// <returns>The normalized value, or the original value if it is null or whitespace.</returns>
        public static string Normalize(string value, NormalizeType normalizeType)
        {
            if (string.IsNullOrWhiteSpace(value))
            {
                return(value);
            }

            if (normalizeType == NormalizeType.CamelCase)
            {
                return(Char.ToLowerInvariant(value[0]) + value.Substring(1));
            }
            else if (normalizeType == NormalizeType.PascalCase)
            {
                return(Char.ToUpperInvariant(value[0]) + value.Substring(1));
            }

            return(value);
        }
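
Hypothetical calls (StringUtility per the file header; note that only the first character changes, so the rest of the string is preserved verbatim):

    StringUtility.Normalize("UserName", NormalizeType.CamelCase);   // "userName"
    StringUtility.Normalize("userName", NormalizeType.PascalCase);  // "UserName"
    StringUtility.Normalize("   ", NormalizeType.CamelCase);        // "   " (whitespace returned unchanged)
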
Code example #8
 public static double GetScalingFactor(NormalizeType normalizer, IList <PeakBoundsMatch> matches)
 {
     if (normalizer == NormalizeType.total)
     {
         return(1);
     }
     else if (normalizer == NormalizeType.frac_manual)
     {
         return(matches.Count(match => !match.IsMissingTruePeak));
     }
     else if (normalizer == NormalizeType.frac_all)
     {
         return(matches.Count);
     }
     else
     {
         throw new InvalidDataException("Unrecognized y axis scaling option");  // Not L10N
     }
 }
Code example #9
    public RenderTexture IDFT(NormalizeType normalizeType = NormalizeType.IDFT)
    {
        FFTUWithRadix(FFTType.IDFT, 32);
        FFTVWithRadix(FFTType.IDFT, 32);
        if (normalizeType == NormalizeType.IDFT)
        {
            Normalize(1.0f / n);
        }
        else if (normalizeType == NormalizeType.SYMMETRIC)
        {
            Normalize(1.0f / Mathf.Sqrt(n));
        }
        else
        {
            SwapTex();
        }

        return(fftTextureOut_);
    }
Code example #10
 public NoiseMapProperties(
     long seed,
     int sizeBound,
     Vector2 offset,
     float scale,
     float chunkScale,
     int octaves,
     float lacunarity,
     float persistance,
     NormalizeType normalizeType)
 {
     this.seed          = seed;
     this.sizeBound     = sizeBound;
     this.offset        = offset;
     this.scale         = scale;
     this.chunkScale    = chunkScale;
     this.octaves       = octaves;
     this.lacunarity    = lacunarity;
     this.persistance   = persistance;
     this.normalizeType = normalizeType;
 }
Code example #11
        public static void RocFromSortedMatches(List <PeakBoundsMatch> matches, NormalizeType normalizer, out PointPairList rocPoints)
        {
            double scalingFactor = GetScalingFactor(normalizer, matches);

            rocPoints = new PointPairList();
            int truePositives  = 0;
            int falsePositives = 0;

            foreach (var match in matches)
            {
                if (match.IsFalsePositive)
                {
                    falsePositives++;
                }
                if (match.IsPickedApexBetweenCuratedBoundaries)
                {
                    truePositives++;
                }
                var rocPoint = new PointPair(falsePositives / (double)(truePositives + falsePositives), truePositives / scalingFactor);
                rocPoints.Add(rocPoint);
            }
        }
Code example #12
File: Angle.cs Project: RoqueDeicide/CryCIL
 /// <summary>
 /// Converts radians to degrees.
 /// </summary>
 /// <param name="radians"> Angle in radians. </param>
 /// <param name="norm">
 /// Indicates whether the angle should be normalized, and if so, how.
 /// </param>
 /// <returns> Angle in degrees. </returns>
 public static double Degree(double radians, NormalizeType norm)
 {
     double degrees = radians * (180 / Math.PI);
     switch (norm)
     {
         case NormalizeType.Normalize180:
             while (degrees <= -180) degrees += 360;
             while (degrees > 180) degrees -= 360;
             break;
         case NormalizeType.Normalize360:
             while (degrees <= 0) degrees += 360;
             while (degrees > 360) degrees -= 360;
             break;
     }
     return degrees;
 }
Code example #13
File: Angle.cs Project: RoqueDeicide/CryCIL
 /// <summary>
 /// Converts degrees to radians.
 /// </summary>
 /// <param name="degrees"> Angle in degrees. </param>
 /// <param name="norm">
 /// Indicates whether the angle should be normalized, and if so, how.
 /// </param>
 /// <returns> Angle in radians. </returns>
 public static float Radian(float degrees, NormalizeType norm)
 {
     double radians = degrees * Math.PI / 180;
     switch (norm)
     {
         case NormalizeType.Normalize180:
             while (radians <= -Math.PI) radians += MathHelpers.PI2;
             while (radians > Math.PI) radians -= MathHelpers.PI2;
             break;
         case NormalizeType.Normalize360:
             while (radians <= 0) radians += MathHelpers.PI2;
             while (radians > MathHelpers.PI2) radians -= MathHelpers.PI2;
             break;
     }
     return (float)radians;
 }
Code example #14
        public static int[,] Normalize(Complex[,] Output, NormalizeType normalizeType)
        {
            int Width  = Output.GetLength(0);
            int Height = Output.GetLength(1);

            double[,] FourierDouble         = new double[Width, Height];
            double[,] FourierLogDouble      = new double[Width, Height];
            int[,] FourierNormalizedInteger = new int[Width, Height];

            double max = 0;

            if (normalizeType == NormalizeType.Magnitude)
            {
                for (int i = 0; i <= Width - 1; i++)
                {
                    for (int j = 0; j <= Height - 1; j++)
                    {
                        FourierDouble[i, j]    = Output[i, j].Magnitude;
                        FourierLogDouble[i, j] = (double)Math.Log(1 + FourierDouble[i, j]);
                    }
                }

                max = FourierLogDouble[0, 0];
            }
            else
            {
                for (int i = 0; i <= Width - 1; i++)
                {
                    for (int j = 0; j <= Height - 1; j++)
                    {
                        FourierDouble[i, j]    = Output[i, j].Phase;
                        FourierLogDouble[i, j] = (double)Math.Log(1 + Math.Abs(FourierDouble[i, j]));
                    }
                }

                FourierLogDouble[0, 0] = 0;
                max = FourierLogDouble[1, 1];
            }

            for (int i = 0; i <= Width - 1; i++)
            {
                for (int j = 0; j <= Height - 1; j++)
                {
                    if (FourierLogDouble[i, j] > max)
                    {
                        max = FourierLogDouble[i, j];
                    }
                }
            }

            for (int i = 0; i <= Width - 1; i++)
            {
                for (int j = 0; j <= Height - 1; j++)
                {
                    FourierLogDouble[i, j] = FourierLogDouble[i, j] / max;
                }
            }

            if (normalizeType == NormalizeType.Magnitude)
            {
                for (int i = 0; i <= Width - 1; i++)
                {
                    for (int j = 0; j <= Height - 1; j++)
                    {
                        FourierNormalizedInteger[i, j] = (int)(2000 * FourierLogDouble[i, j]);
                    }
                }
            }
            else
            {
                for (int i = 0; i <= Width - 1; i++)
                {
                    for (int j = 0; j <= Height - 1; j++)
                    {
                        FourierNormalizedInteger[i, j] = (int)(255 * FourierLogDouble[i, j]);
                    }
                }
            }

            return(FourierNormalizedInteger);
        }
Code example #15
        /// <summary>
        ///  Regression learner for XGBoost
        /// </summary>
        /// <param name="maximumTreeDepth">Maximum tree depth for base learners. (default is 3)</param>
        /// <param name="learningRate">Boosting learning rate (xgb's "eta"). (default is 0.1)</param>
        /// <param name="estimators">Number of estimators to fit. (default is 100)</param>
        /// <param name="silent">Whether to print messages while running boosting. (default is true)</param>
        /// <param name="objective">Specify the learning task and the corresponding learning objective. (default is LinearRegression)</param>
        /// <param name="boosterType">Which booster to use; can be gbtree, gblinear or dart.
        /// gbtree and dart use tree-based models while gblinear uses a linear function. (default is gbtree)</param>
        /// <param name="treeMethod">The tree construction algorithm used in XGBoost. See reference paper: https://arxiv.org/abs/1603.02754. (default is auto)</param>
        /// <param name="samplerType">Type of sampling algorithm for DART. (default is uniform)</param>
        /// <param name="normalizeType">Type of normalization algorithm for DART. (default is tree)</param>
        /// <param name="dropoutRate">Dropout rate for DART (a fraction of previous trees to drop during the dropout). (default is 0.0)</param>
        /// <param name="oneDrop">When this is true, at least one tree is always dropped during the dropout.
        /// Allows Binomial-plus-one or epsilon-dropout from the original DART paper. (default is false)</param>
        /// <param name="skipDrop">Probability of skipping the dropout procedure during a boosting iteration. (default is 0.0)
        /// If a dropout is skipped, new trees are added in the same manner as gbtree.
        /// Note that non-zero skip_drop has higher priority than rate_drop or one_drop.</param>
        /// <param name="numberOfThreads">Number of parallel threads used to run xgboost. -1 means use all threads available. (default is -1)</param>
        /// <param name="gamma">Minimum loss reduction required to make a further partition on a leaf node of the tree. (default is 0) </param>
        /// <param name="minChildWeight">Minimum sum of instance weight (Hessian) needed in a child. (default is 1)</param>
        /// <param name="maxDeltaStep">Maximum delta step we allow each tree's weight estimation to be. (default is 0)</param>
        /// <param name="subSample">Subsample ratio of the training instance. (default is 1)</param>
        /// <param name="colSampleByTree">Subsample ratio of columns when constructing each tree. (default is 1)</param>
        /// <param name="colSampleByLevel">Subsample ratio of columns for each split, in each level. (default is 1)</param>
        /// <param name="l1Regularization">L1 regularization term on weights. Also known as RegAlpha. (default is 0)</param>
        /// <param name="l2Reguralization">L2 regularization term on weights. Also known as regLambda. (default is 1)</param>
        /// <param name="scalePosWeight">Balancing of positive and negative weights. (default is 1)</param>
        /// <param name="baseScore">The initial prediction score of all instances, global bias. (default is 0.5)</param>
        /// <param name="seed">Random number seed. (default is 0)</param>
        /// <param name="missing">Value in the data which needs to be present as a missing value. (default is NaN)</param>
        public RegressionXGBoostLearner(
            int maximumTreeDepth          = 3,
            double learningRate           = 0.1,
            int estimators                = 100,
            bool silent                   = true,
            RegressionObjective objective = RegressionObjective.LinearRegression,
            BoosterType boosterType       = BoosterType.GBTree,
            TreeMethod treeMethod         = TreeMethod.Auto,
            SamplerType samplerType       = SamplerType.Uniform,
            NormalizeType normalizeType   = NormalizeType.Tree,
            double dropoutRate            = 0.0,
            bool oneDrop                  = false,
            double skipDrop               = 0.0,
            int numberOfThreads           = -1,
            double gamma                  = 0,
            int minChildWeight            = 1,
            int maxDeltaStep              = 0,
            double subSample              = 1,
            double colSampleByTree        = 1,
            double colSampleByLevel       = 1,
            double l1Regularization       = 0,
            double l2Reguralization       = 1,
            double scalePosWeight         = 1,
            double baseScore              = 0.5,
            int seed       = 0,
            double missing = double.NaN)
        {
            ArgumentChecks.ThrowOnArgumentLessThan(nameof(maximumTreeDepth), maximumTreeDepth, 0);
            ArgumentChecks.ThrowOnArgumentLessThanOrHigherThan(nameof(learningRate), learningRate, 0, 1.0);
            ArgumentChecks.ThrowOnArgumentLessThan(nameof(estimators), estimators, 1);
            ArgumentChecks.ThrowOnArgumentLessThan(nameof(numberOfThreads), numberOfThreads, -1);
            ArgumentChecks.ThrowOnArgumentLessThan(nameof(gamma), gamma, 0);
            ArgumentChecks.ThrowOnArgumentLessThan(nameof(minChildWeight), minChildWeight, 0);
            ArgumentChecks.ThrowOnArgumentLessThan(nameof(maxDeltaStep), maxDeltaStep, 0);
            ArgumentChecks.ThrowOnArgumentLessThanOrHigherThan(nameof(subSample), subSample, 0, 1.0);
            ArgumentChecks.ThrowOnArgumentLessThanOrHigherThan(nameof(colSampleByTree), colSampleByTree, 0, 1.0);
            ArgumentChecks.ThrowOnArgumentLessThanOrHigherThan(nameof(colSampleByLevel), colSampleByLevel, 0, 1.0);
            ArgumentChecks.ThrowOnArgumentLessThan(nameof(l1Regularization), l1Regularization, 0);
            ArgumentChecks.ThrowOnArgumentLessThan(nameof(l2Reguralization), l2Reguralization, 0);
            ArgumentChecks.ThrowOnArgumentLessThan(nameof(scalePosWeight), scalePosWeight, 0);

            m_parameters[ParameterNames.MaxDepth]     = maximumTreeDepth;
            m_parameters[ParameterNames.LearningRate] = (float)learningRate;
            m_parameters[ParameterNames.Estimators]   = estimators;
            m_parameters[ParameterNames.Silent]       = silent;
            m_parameters[ParameterNames.objective]    = objective.ToXGBoostString();

            m_parameters[ParameterNames.Threads]          = numberOfThreads;
            m_parameters[ParameterNames.Gamma]            = (float)gamma;
            m_parameters[ParameterNames.MinChildWeight]   = minChildWeight;
            m_parameters[ParameterNames.MaxDeltaStep]     = maxDeltaStep;
            m_parameters[ParameterNames.SubSample]        = (float)subSample;
            m_parameters[ParameterNames.ColSampleByTree]  = (float)colSampleByTree;
            m_parameters[ParameterNames.ColSampleByLevel] = (float)colSampleByLevel;
            m_parameters[ParameterNames.RegAlpha]         = (float)l1Regularization;
            m_parameters[ParameterNames.RegLambda]        = (float)l2Reguralization;
            m_parameters[ParameterNames.ScalePosWeight]   = (float)scalePosWeight;

            m_parameters[ParameterNames.BaseScore]       = (float)baseScore;
            m_parameters[ParameterNames.Seed]            = seed;
            m_parameters[ParameterNames.Missing]         = (float)missing;
            m_parameters[ParameterNames.ExistingBooster] = null;
            m_parameters[ParameterNames.Booster]         = boosterType.ToXGBoostString();
            m_parameters[ParameterNames.TreeMethod]      = treeMethod.ToXGBoostString();

            m_parameters[ParameterNames.SampleType]    = samplerType.ToXGBoostString();
            m_parameters[ParameterNames.NormalizeType] = normalizeType.ToXGBoostString();
            m_parameters[ParameterNames.RateDrop]      = (float)dropoutRate;
            m_parameters[ParameterNames.OneDrop]       = oneDrop ? 1 : 0;
            m_parameters[ParameterNames.SkipDrop]      = (float)skipDrop;
        }
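
A hypothetical configuration showing when normalizeType actually matters: the DART parameters (samplerType, normalizeType, dropoutRate, oneDrop, skipDrop) only take effect when the booster is dart; with the default gbtree they are passed through but ignored. This assumes BoosterType has a Dart member, as the XML doc above implies:

    var learner = new RegressionXGBoostLearner(
        boosterType: BoosterType.Dart,       // DART booster (assumed enum member)
        samplerType: SamplerType.Uniform,
        normalizeType: NormalizeType.Forest, // normalization scheme for dropped trees
        dropoutRate: 0.1,
        skipDrop: 0.5);
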
Code example #16
        /// <summary>
        /// Classification learner for XGBoost. For classification problems,
        /// XGBoost requires that target values are sequential and start at 0.
        /// </summary>
        /// <param name="maximumTreeDepth">Maximum tree depth for base learners. (default is 3)</param>
        /// <param name="learningRate">Boosting learning rate (xgb's "eta"). (default is 0.1)</param>
        /// <param name="estimators">Number of estimators to fit. (default is 100)</param>
        /// <param name="silent">Whether to print messages while running boosting. (default is true)</param>
        /// <param name="objective">Specify the learning task and the corresponding learning objective. (default is softmax)</param>
        /// <param name="boosterType">Which booster to use; can be gbtree, gblinear or dart.
        /// gbtree and dart use tree-based models while gblinear uses a linear function. (default is gbtree)</param>
        /// <param name="treeMethod">The tree construction algorithm used in XGBoost. See reference paper: https://arxiv.org/abs/1603.02754. (default is auto)</param>
        /// <param name="samplerType">Type of sampling algorithm for DART. (default is uniform)</param>
        /// <param name="normalizeType">Type of normalization algorithm for DART. (default is tree)</param>
        /// <param name="dropoutRate">Dropout rate for DART (a fraction of previous trees to drop during the dropout). (default is 0.0)</param>
        /// <param name="oneDrop">When this is true, at least one tree is always dropped during the dropout.
        /// Allows Binomial-plus-one or epsilon-dropout from the original DART paper. (default is false)</param>
        /// <param name="skipDrop">Probability of skipping the dropout procedure during a boosting iteration. (default is 0.0)
        /// If a dropout is skipped, new trees are added in the same manner as gbtree.
        /// Note that non-zero skip_drop has higher priority than rate_drop or one_drop.</param>
        /// <param name="numberOfThreads">Number of parallel threads used to run xgboost. -1 means use all threads available. (default is -1)</param>
        /// <param name="gamma">Minimum loss reduction required to make a further partition on a leaf node of the tree. (default is 0) </param>
        /// <param name="minChildWeight">Minimum sum of instance weight (Hessian) needed in a child. (default is 1)</param>
        /// <param name="maxDeltaStep">Maximum delta step we allow each tree's weight estimation to be. (default is 0)</param>
        /// <param name="subSample">Subsample ratio of the training instance. (default is 1)</param>
        /// <param name="colSampleByTree">Subsample ratio of columns when constructing each tree. (default is 1)</param>
        /// <param name="colSampleByLevel">Subsample ratio of columns for each split, in each level. (default is 1)</param>
        /// <param name="l1Regularization">L1 regularization term on weights. Also known as RegAlpha. (default is 0)</param>
        /// <param name="l2Reguralization">L2 regularization term on weights. Also known as regLambda. (default is 1)</param>
        /// <param name="scalePosWeight">Balancing of positive and negative weights. (default is 1)</param>
        /// <param name="baseScore">The initial prediction score of all instances, global bias. (default is 0.5)</param>
        /// <param name="seed">Random number seed. (default is 0)</param>
        /// <param name="missing">Value in the data which needs to be present as a missing value. (default is NaN)</param>
        public ClassificationXGBoostLearner(
            int maximumTreeDepth              = 3,
            double learningRate               = 0.1,
            int estimators                    = 100,
            bool silent                       = true,
            ClassificationObjective objective = ClassificationObjective.Softmax,
            BoosterType boosterType           = BoosterType.GBTree,
            TreeMethod treeMethod             = TreeMethod.Auto,
            SamplerType samplerType           = SamplerType.Uniform,
            NormalizeType normalizeType       = NormalizeType.Tree,
            double dropoutRate                = 0.0,
            bool oneDrop                      = false,
            double skipDrop                   = 0.0,
            int numberOfThreads               = -1,
            double gamma                      = 0,
            int minChildWeight                = 1,
            int maxDeltaStep                  = 0,
            double subSample                  = 1,
            double colSampleByTree            = 1,
            double colSampleByLevel           = 1,
            double l1Regularization           = 0,
            double l2Reguralization           = 1,
            double scalePosWeight             = 1,
            double baseScore                  = 0.5,
            int seed                          = 0,
            double missing                    = double.NaN)
        {
            ArgumentChecks.ThrowOnArgumentLessThan(nameof(maximumTreeDepth), maximumTreeDepth, 0);
            ArgumentChecks.ThrowOnArgumentLessThanOrHigherThan(nameof(learningRate), learningRate, 0, 1.0);
            ArgumentChecks.ThrowOnArgumentLessThan(nameof(estimators), estimators, 1);
            ArgumentChecks.ThrowOnArgumentLessThan(nameof(numberOfThreads), numberOfThreads, -1);
            ArgumentChecks.ThrowOnArgumentLessThan(nameof(gamma), gamma, 0);
            ArgumentChecks.ThrowOnArgumentLessThan(nameof(minChildWeight), minChildWeight, 0);
            ArgumentChecks.ThrowOnArgumentLessThan(nameof(maxDeltaStep), maxDeltaStep, 0);
            ArgumentChecks.ThrowOnArgumentLessThanOrHigherThan(nameof(subSample), subSample, 0, 1.0);
            ArgumentChecks.ThrowOnArgumentLessThanOrHigherThan(nameof(colSampleByTree), colSampleByTree, 0, 1.0);
            ArgumentChecks.ThrowOnArgumentLessThanOrHigherThan(nameof(colSampleByLevel), colSampleByLevel, 0, 1.0);
            ArgumentChecks.ThrowOnArgumentLessThan(nameof(l1Regularization), l1Regularization, 0);
            ArgumentChecks.ThrowOnArgumentLessThan(nameof(l2Reguralization), l2Reguralization, 0);
            ArgumentChecks.ThrowOnArgumentLessThan(nameof(scalePosWeight), scalePosWeight, 0);

            m_parameters[ParameterNames.MaxDepth]     = maximumTreeDepth;
            m_parameters[ParameterNames.LearningRate] = (float)learningRate;
            m_parameters[ParameterNames.Estimators]   = estimators;
            m_parameters[ParameterNames.Silent]       = silent;

            if (objective == ClassificationObjective.Softmax)
            {
                // SoftMax and SoftProp are the same objective,
                // but softprop returns probabilities.
                // So in order to always support PredictProbability,
                // always use softprop for multi-class.
                // Conversions to class labels is handled in the
                // ClassificationXGBoostModel.
                objective = ClassificationObjective.SoftProb;
            }

            m_parameters[ParameterNames.objective] = objective.ToXGBoostString();

            m_parameters[ParameterNames.Threads]          = numberOfThreads;
            m_parameters[ParameterNames.Gamma]            = (float)gamma;
            m_parameters[ParameterNames.MinChildWeight]   = minChildWeight;
            m_parameters[ParameterNames.MaxDeltaStep]     = maxDeltaStep;
            m_parameters[ParameterNames.SubSample]        = (float)subSample;
            m_parameters[ParameterNames.ColSampleByTree]  = (float)colSampleByTree;
            m_parameters[ParameterNames.ColSampleByLevel] = (float)colSampleByLevel;
            m_parameters[ParameterNames.RegAlpha]         = (float)l1Regularization;
            m_parameters[ParameterNames.RegLambda]        = (float)l2Reguralization;
            m_parameters[ParameterNames.ScalePosWeight]   = (float)scalePosWeight;

            m_parameters[ParameterNames.BaseScore]       = (float)baseScore;
            m_parameters[ParameterNames.Seed]            = seed;
            m_parameters[ParameterNames.Missing]         = (float)missing;
            m_parameters[ParameterNames.ExistingBooster] = null;
            m_parameters[ParameterNames.Booster]         = boosterType.ToXGBoostString();
            m_parameters[ParameterNames.TreeMethod]      = treeMethod.ToXGBoostString();

            m_parameters[ParameterNames.SampleType]    = samplerType.ToXGBoostString();
            m_parameters[ParameterNames.NormalizeType] = normalizeType.ToXGBoostString();
            m_parameters[ParameterNames.RateDrop]      = (float)dropoutRate;
            m_parameters[ParameterNames.OneDrop]       = oneDrop ? 1 : 0;
            m_parameters[ParameterNames.SkipDrop]      = (float)skipDrop;
        }
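
A hypothetical call illustrating the Softmax promotion in the middle of the constructor: both of these configurations end up with the same underlying objective string, since Softmax is promoted to SoftProb so that PredictProbability is always supported:

    var a = new ClassificationXGBoostLearner(objective: ClassificationObjective.Softmax);
    var b = new ClassificationXGBoostLearner(objective: ClassificationObjective.SoftProb);
    // Both set m_parameters[ParameterNames.objective] to the SoftProb string.
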