// static methods
        /// <summary>
        /// Runs a single AdaGrad update over one minibatch of trees,
        /// mutating <paramref name="model"/>'s parameters in place.
        /// </summary>
        /// <param name="model">The sentiment model whose parameters are updated.</param>
        /// <param name="trainingBatch">The trees making up this minibatch.</param>
        /// <param name="sumGradSquare">Running per-parameter sum of squared gradients
        /// (AdaGrad history); updated in place across batches.</param>
        private static void ExecuteOneTrainingBatch(SentimentModel model, IList <Tree> trainingBatch, double[] sumGradSquare)
        {
            SentimentCostAndGradient costFunction = new SentimentCostAndGradient(model, trainingBatch);

            double[] parameters = model.ParamsToVector();
            // Smoothing term keeps the AdaGrad denominator away from zero.
            double eps = 1e-3;

            // TODO: do we want to iterate multiple times per batch?
            double[] gradient = costFunction.DerivativeAt(parameters);
            double batchCost = costFunction.ValueAt(parameters);

            log.Info("batch cost: " + batchCost);
            for (int i = 0; i < gradient.Length; i++)
            {
                // Accumulate squared gradient, then scale the step by its inverse root.
                sumGradSquare[i] += gradient[i] * gradient[i];
                double step = model.op.trainOptions.learningRate * gradient[i] / (Math.Sqrt(sumGradSquare[i]) + eps);
                parameters[i] -= step;
            }
            model.VectorToParams(parameters);
        }
        /// <summary>
        /// Verifies the analytic gradient of the cost function against a numeric
        /// estimate for the given trees.
        /// </summary>
        /// <param name="model">The model supplying the parameter vector to check.</param>
        /// <param name="trees">The trees over which the cost and gradient are evaluated.</param>
        /// <returns>True if the gradient check passes; false otherwise.</returns>
        public static bool RunGradientCheck(SentimentModel model, IList <Tree> trees) =>
            new SentimentCostAndGradient(model, trees)
                .GradientCheck(model.TotalParamSize(), 50, model.ParamsToVector());