Example #1
        /// <summary>
        /// Learns an XGBoost classification model.
        /// </summary>
        /// <param name="observations">Observation matrix, one row per sample.</param>
        /// <param name="targets">Target class values, one per observation row.</param>
        /// <param name="indices">Indices of the rows to use for training.</param>
        /// <returns>The learned classification model.</returns>
        public ClassificationXGBoostModel Learn(F64Matrix observations, double[] targets, int[] indices)
        {
            Checks.VerifyObservationsAndTargets(observations, targets);
            Checks.VerifyIndices(indices, observations, targets);

            var floatObservations = observations.ToFloatJaggedArray(indices);
            var floatTargets      = targets.ToFloat(indices);

            // Only specify XGBoost number of classes if the objective is multi-class.
            var objective = (string)m_parameters[ParameterNames.objective];

            if (objective == ClassificationObjective.Softmax.ToXGBoostString() || objective == ClassificationObjective.SoftProb.ToXGBoostString())
            {
                var numberOfClasses = floatTargets.Distinct().Count();
                m_parameters[ParameterNames.NumberOfClasses] = numberOfClasses;
            }

            using (var train = new DMatrix(floatObservations, floatTargets))
            {
                var booster    = new Booster(m_parameters.ToDictionary(v => v.Key, v => v.Value), train);
                var iterations = (int)m_parameters[ParameterNames.Estimators];

                for (int iteration = 0; iteration < iterations; iteration++)
                {
                    booster.Update(train, iteration);
                }

                return new ClassificationXGBoostModel(booster);
            }
        }
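A minimal usage sketch for this classification Learn method, assuming a ClassificationXGBoostLearner that exposes it and small in-memory data; the constructor call and values below are illustrative assumptions, not taken from the example above.

        // Hypothetical usage sketch (assumed learner construction and data).
        var observations = new F64Matrix(new double[] { 1, 2, 3, 4, 5, 6 }, 3, 2);
        var targets = new double[] { 0, 1, 0 };            // class labels, one per row
        var indices = new int[] { 0, 1, 2 };               // train on all three rows

        var learner = new ClassificationXGBoostLearner();  // assumed default constructor
        var model = learner.Learn(observations, targets, indices);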
Example #2
        /// <summary>
        /// Predicts a target value for each row in the observation matrix.
        /// </summary>
        /// <param name="observations">Observation matrix, one row per sample.</param>
        /// <returns>Predicted values, one per observation row.</returns>
        public double[] Predict(F64Matrix observations)
        {
            var floatObservations = observations.ToFloatJaggedArray();

            using (var data = new DMatrix(floatObservations))
            {
                return m_model.Predict(data).ToDouble();
            }
        }
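A minimal sketch of how this Predict method might be called once a model has been learned; the model and data below are illustrative assumptions.

        // Hypothetical usage sketch (assumes 'model' was learned as in Example #1).
        var testObservations = new F64Matrix(new double[] { 1, 2, 3, 4 }, 2, 2);
        var predictions = model.Predict(testObservations); // one predicted value per row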
Example #3
        public void Conversions_ToFloatJaggedArray_Indexed()
        {
            var data = new double[]
            {
                10, 11,
                12, 13,
                14, 15
            };
            var matrix   = new F64Matrix(data, 3, 2);
            var actual   = matrix.ToFloatJaggedArray(new int[] { 0, 2 });
            var expected = new float[][]
            {
                new float[] { 10, 11 },
                new float[] { 14, 15 },
            };

            AssertArrays(actual, expected);
        }
        /// <summary>
        /// Learns an XGBoost regression model.
        /// </summary>
        /// <param name="observations">Observation matrix, one row per sample.</param>
        /// <param name="targets">Target values, one per observation row.</param>
        /// <param name="indices">Indices of the rows to use for training.</param>
        /// <returns>The learned regression model.</returns>
        public RegressionXGBoostModel Learn(F64Matrix observations, double[] targets, int[] indices)
        {
            Checks.VerifyObservationsAndTargets(observations, targets);
            Checks.VerifyIndices(indices, observations, targets);

            var floatObservations = observations.ToFloatJaggedArray(indices);
            var floatTargets      = targets.ToFloat(indices);

            using (var train = new DMatrix(floatObservations, floatTargets))
            {
                var booster    = new Booster(m_parameters.ToDictionary(v => v.Key, v => v.Value), train);
                var iterations = (int)m_parameters[ParameterNames.Estimators];

                for (int iteration = 0; iteration < iterations; iteration++)
                {
                    booster.Update(train, iteration);
                }

                return new RegressionXGBoostModel(booster);
            }
        }
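A minimal usage sketch for the regression variant, assuming a RegressionXGBoostLearner that exposes this Learn method; the constructor call and data below are illustrative assumptions.

        // Hypothetical usage sketch (assumed learner construction and data).
        var observations = new F64Matrix(new double[] { 1, 2, 3, 4, 5, 6 }, 3, 2);
        var targets = new double[] { 1.5, 2.0, 2.5 };      // continuous targets, one per row
        var indices = new int[] { 0, 1, 2 };

        var learner = new RegressionXGBoostLearner();      // assumed default constructor
        var model = learner.Learn(observations, targets, indices);
        var predictions = model.Predict(observations);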