        public void VerifyResult(string name, MethodInfo methodInfo, object expectedResult)
        {
            var input    = InputDataManager.GetInputArgs(methodInfo);
            var instance = methodInfo.IsStatic ? null : Activator.CreateInstance(methodInfo.DeclaringType);

            var result = methodInfo.Invoke(instance, input);

            result.Should().Be(expectedResult);
        }
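        // Usage sketch (illustrative; the "Calculator" type and expected value below are hypothetical):
        //   MethodInfo addMethod = typeof(Calculator).GetMethod(nameof(Calculator.Add));
        //   VerifyResult(nameof(Calculator.Add), addMethod, expectedResult: 5);
        // InputDataManager.GetInputArgs is expected to supply the arguments for the invocation.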
            /// <summary>
            /// Gathers pointers to all of the buffered instances into arrays and passes them to SymSGDNative.
            /// </summary>
            /// <param name="inputDataManager">The buffered data</param>
            /// <param name="tuneLR">Specifies if SymSGD should tune the learning rate automatically</param>
            /// <param name="lr">Initial learning rate</param>
            /// <param name="l2Const">L2 regularization constant</param>
            /// <param name="piw">Weight applied to positive instances</param>
            /// <param name="weightVector">The storage for the weight vector</param>
            /// <param name="bias">The bias</param>
            /// <param name="numFeatures">Number of features</param>
            /// <param name="numPasses">Number of passes</param>
            /// <param name="numThreads">Number of threads</param>
            /// <param name="tuneNumLocIter">Specifies if SymSGD should tune numLocIter automatically</param>
            /// <param name="numLocIter">Number of thread-local iterations of SGD before combining with the global model</param>
            /// <param name="tolerance">Tolerance for the amount of decrease in the total loss in consecutive passes</param>
            /// <param name="needShuffle">Specifies if the data needs to be shuffled</param>
            /// <param name="shouldInitialize">Specifies if this is the first time SymSGD is run</param>
            /// <param name="stateGCHandle">Pinned handle to the State object shared with the native code</param>
            /// <param name="info">Callback for relaying informational messages to the host channel</param>
            public static unsafe void LearnAll(InputDataManager inputDataManager, bool tuneLR,
                                        ref float lr, float l2Const, float piw, Span<float> weightVector, ref float bias, int numFeatures, int numPasses,
                                        int numThreads, bool tuneNumLocIter, ref int numLocIter, float tolerance, bool needShuffle, bool shouldInitialize, GCHandle stateGCHandle, ChannelCallBack info)
            {
                inputDataManager.PrepareCursoring();

                int totalNumInstances = inputDataManager.Count;

                // Each instance has a pointer to its indices array and a pointer to its values array
                int*[]   arrayIndicesPointers = new int*[totalNumInstances];
                float*[] arrayValuesPointers  = new float*[totalNumInstances];
                // Labels of the instances
                float[] instLabels = new float[totalNumInstances];
                // Size (feature count) of each instance
                int[] instSizes = new int[totalNumInstances];
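                // The four arrays above form a struct-of-arrays view of the buffer:
                // entry i of each array describes instance i.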

                int instanceIndex = 0;

                // Going through the buffer to set the properties and the pointers
                while (inputDataManager.GiveNextInstance(out InstanceProperties? prop, out GCHandle? indicesGcHandle, out int indicesStartIndex, out GCHandle? valuesGcHandle, out int valuesStartIndex))
                {
                    if (prop.Value.IsDense)
                    {
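                        // Dense instances carry no indices array; a null pointer marks the
                        // instance as dense for the native code.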
                        arrayIndicesPointers[instanceIndex] = null;
                    }
                    else
                    {
                        int* pIndicesArray = (int*)indicesGcHandle.Value.AddrOfPinnedObject();
                        arrayIndicesPointers[instanceIndex] = &pIndicesArray[indicesStartIndex];
                    }
                    float* pValuesArray = (float*)valuesGcHandle.Value.AddrOfPinnedObject();
                    arrayValuesPointers[instanceIndex] = &pValuesArray[valuesStartIndex];

                    instLabels[instanceIndex] = prop.Value.Label;
                    instSizes[instanceIndex]  = prop.Value.FeatureCount;
                    instanceIndex++;
                }
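                // The GCHandles returned by GiveNextInstance are assumed to pin their arrays
                // (AddrOfPinnedObject requires a pinned handle), so the raw pointers collected
                // above remain valid while the native code runs.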

                fixed (float* pWeightVector = &weightVector[0])
                fixed (int** pIndicesPointer = &arrayIndicesPointers[0])
                fixed (float** pValuesPointer = &arrayValuesPointers[0])
                fixed (int* pInstSizes = &instSizes[0])
                fixed (float* pInstLabels = &instLabels[0])
                {
                    LearnAll(totalNumInstances, pInstSizes, pIndicesPointer, pValuesPointer, pInstLabels, tuneLR, ref lr, l2Const, piw,
                             pWeightVector, ref bias, numFeatures, numPasses, numThreads, tuneNumLocIter, ref numLocIter, tolerance, needShuffle,
                             shouldInitialize, (State*)stateGCHandle.AddrOfPinnedObject(), info);
                }
            }
        private TPredictor TrainCore(IChannel ch, RoleMappedData data, LinearModelParameters predictor, int weightSetCount)
        {
            int numFeatures   = data.Schema.Feature.Value.Type.GetVectorSize();
            var cursorFactory = new FloatLabelCursor.Factory(data, CursOpt.Label | CursOpt.Features);
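            // Note: numThreads is fixed to 1 here, so training is single-threaded; the
            // numThreads > 1 cleanup path near the end of this method is kept for the
            // multi-threaded case.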
            int numThreads    = 1;

            ch.CheckUserArg(numThreads > 0, nameof(_options.NumberOfThreads),
                            "The number of threads must be either null or a positive integer.");

            var             positiveInstanceWeight = _options.PositiveInstanceWeight;
            VBuffer<float>  weights = default;
            float           bias    = 0.0f;

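            // Warm start: if an existing predictor is supplied, continue training from its
            // (densified) weights and bias; otherwise start from a freshly allocated dense vector.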
            if (predictor != null)
            {
                predictor.GetFeatureWeights(ref weights);
                VBufferUtils.Densify(ref weights);
                bias = predictor.Bias;
            }
            else
            {
                weights = VBufferUtils.CreateDense<float>(numFeatures);
            }

            var weightsEditor = VBufferEditor.CreateFromBuffer(ref weights);
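            // The editor exposes the weight buffer as a writable Span<float>, which
            // Native.LearnAll pins and passes to the native trainer.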

            // Reference: the Parasail project's SymSGD algorithm.
            bool tuneLR = _options.LearningRate == null;
            var  lr     = _options.LearningRate ?? 1.0f;

            bool tuneNumLocIter = (_options.UpdateFrequency == null);
            var  numLocIter     = _options.UpdateFrequency ?? 1;

            var l2Const = _options.L2Regularization;
            var piw     = positiveInstanceWeight;
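            // Convention: a null option means "tune automatically". For example,
            // LearningRate == null  => tuneLR == true and lr starts from the 1.0f default;
            // LearningRate == 0.1f  => tuneLR == false and lr stays at 0.1f.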

            // This is the state of the learner that is shared with the native code.
            State    state         = new State();
            GCHandle stateGCHandle = default;

            try
            {
                stateGCHandle = GCHandle.Alloc(state, GCHandleType.Pinned);
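                // The State object is pinned so its address cannot move; Native.LearnAll casts
                // AddrOfPinnedObject() to a raw State* that the native code reads and updates.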

                state.TotalInstancesProcessed = 0;
                using (InputDataManager inputDataManager = new InputDataManager(this, cursorFactory, ch))
                {
                    bool shouldInitialize = true;
                    using (var pch = Host.StartProgressChannel("Preprocessing"))
                    {
                        inputDataManager.LoadAsMuchAsPossible();
                    }

                    int iter = 0;
                    if (inputDataManager.IsFullyLoaded)
                    {
                        ch.Info("Data fully loaded into memory.");
                    }
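                    // Two strategies follow: if the data fit entirely in memory, a single native
                    // call runs all iterations; otherwise we alternate between loading a batch and
                    // running a few passes over it until the requested iterations are exhausted.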
                    using (var pch = Host.StartProgressChannel("Training"))
                    {
                        if (inputDataManager.IsFullyLoaded)
                        {
                            pch.SetHeader(new ProgressHeader(new[] { "iterations" }),
                                          entry => entry.SetProgress(0, state.PassIteration, _options.NumberOfIterations));
                            // If fully loaded, call SymSGDNative once and do not come back until all iterations have been learned.
                            Native.LearnAll(inputDataManager, tuneLR, ref lr, l2Const, piw, weightsEditor.Values, ref bias, numFeatures,
                                            _options.NumberOfIterations, numThreads, tuneNumLocIter, ref numLocIter, _options.Tolerance, _options.Shuffle, shouldInitialize,
                                            stateGCHandle, ch.Info);
                            shouldInitialize = false;
                        }
                        else
                        {
                            pch.SetHeader(new ProgressHeader(new[] { "iterations" }),
                                          entry => entry.SetProgress(0, iter, _options.NumberOfIterations));

                            // Since the data was loaded in batches, multiple passes over the loaded data are feasible.
                            // Heuristic: one pass per 10,000 buffered instances.
                            int numPassesForABatch = inputDataManager.Count / 10000;
                            while (iter < _options.NumberOfIterations)
                            {
                                // We want the final passes to train thoroughly (without learning on the same
                                // batch multiple times); this fine-tunes the AUC. Experimentally, we found
                                // that 1 or 2 such passes are enough.
                                int numFinalPassesToTrainThoroughly = 2;
                                // We also do not want to run more passes than the user requested
                                int numPassesForThisBatch = Math.Min(numPassesForABatch, _options.NumberOfIterations - iter - numFinalPassesToTrainThoroughly);
                                // If that leaves us with 0 or fewer passes, run at least 1
                                numPassesForThisBatch = Math.Max(1, numPassesForThisBatch);
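                                // Worked example (illustrative numbers): with 50,000 buffered instances,
                                // numPassesForABatch = 50000 / 10000 = 5; with NumberOfIterations = 10 and
                                // iter = 0, numPassesForThisBatch = min(5, 10 - 0 - 2) = 5.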
                                state.PassIteration   = iter;
                                Native.LearnAll(inputDataManager, tuneLR, ref lr, l2Const, piw, weightsEditor.Values, ref bias, numFeatures,
                                                numPassesForThisBatch, numThreads, tuneNumLocIter, ref numLocIter, _options.Tolerance, _options.Shuffle, shouldInitialize,
                                                stateGCHandle, ch.Info);
                                shouldInitialize = false;

                                // Check whether we have gone through all of the loaded data
                                if (inputDataManager.FinishedTheLoad)
                                {
                                    iter += numPassesForThisBatch;
                                    // Check if more passes are left
                                    if (iter < _options.NumberOfIterations)
                                    {
                                        inputDataManager.RestartLoading(_options.Shuffle, Host);
                                    }
                                }

                                // If more passes are left, load as much as possible
                                if (iter < _options.NumberOfIterations)
                                {
                                    inputDataManager.LoadAsMuchAsPossible();
                                }
                            }
                        }

                        // With multiple threads, the native code may leave some dense features relocated;
                        // map them back to their original positions in the weight vector.
                        if (numThreads > 1)
                        {
                            Native.MapBackWeightVector(weightsEditor.Values, stateGCHandle);
                        }
                        Native.DeallocateSequentially(stateGCHandle);
                    }
                }
            }
            finally
            {
                if (stateGCHandle.IsAllocated)
                {
                    stateGCHandle.Free();
                }
            }
            return CreatePredictor(weights, bias);
        }