/// <summary>
        /// Execute the training operation.
        /// </summary>
        /// <param name="indexer">The data indexer.</param>
        /// <returns>The trained <see cref="IMaxentModel"/> model.</returns>
        protected override IMaxentModel DoTrain(IDataIndexer indexer)
        {
            Display("Incorporating indexed data for training...");

            indexer.Execute();

            contexts           = indexer.GetContexts();
            values             = indexer.Values;
            numTimesEventsSeen = indexer.GetNumTimesEventsSeen();
            numEvents          = indexer.GetNumEvents();
            numUniqueEvents    = contexts.Length;

            outcomeLabels = indexer.GetOutcomeLabels();
            outcomeList   = indexer.GetOutcomeList();

            predLabels  = indexer.GetPredLabels();
            numPreds    = predLabels.Length;
            numOutcomes = outcomeLabels.Length;

            Display("done.");

            Display("\tNumber of Event Tokens: " + numUniqueEvents);
            Display("\t    Number of Outcomes: " + numOutcomes);
            Display("\t  Number of Predicates: " + numPreds);

            Display("Computing model parameters...");

            // ReSharper disable once CoVariantArrayConversion - we read the parameters ;)
            Context[] finalParameters = FindParameters();

            Display("...done.\n");


            return(new NaiveBayesModel(finalParameters, predLabels, outcomeLabels));
        }
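DoTrain is not called directly; it is the protected hook invoked by the trainer's public entry point once a data indexer is available. A minimal usage sketch follows, where the NaiveBayesTrainer type, the TwoPassDataIndexer constructor, the ReadEvents helper, and the Train entry point are assumptions rather than code from the listing above:

    // Minimal sketch (assumed surrounding API): index the events, then train.
    IObjectStream<Event> events = ReadEvents("train.txt");    // hypothetical event source
    IDataIndexer indexer = new TwoPassDataIndexer(events, 5); // cutoff = 5 (assumed constructor)

    var trainer = new NaiveBayesTrainer();
    IMaxentModel model = trainer.Train(indexer);               // assumed entry point that delegates to DoTrain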
Example 2
        /// <summary>
        /// Initializes a new instance of the <see cref="NegLogLikelihood"/> class.
        /// </summary>
        /// <param name="indexer">The data indexer.</param>
        public NegLogLikelihood(IDataIndexer indexer)
        {
            if (indexer == null)
            {
                throw new ArgumentNullException(nameof(indexer));
            }

            if (!indexer.Completed)
            {
                indexer.Execute();
            }

            values = indexer is OnePassRealValueDataIndexer ? indexer.Values : null;

            contexts           = indexer.GetContexts();
            outcomeList        = indexer.GetOutcomeList();
            numTimesEventsSeen = indexer.GetNumTimesEventsSeen();

            numOutcomes = indexer.GetOutcomeLabels().Length;
            numFeatures = indexer.GetPredLabels().Length;
            numContexts = contexts.Length;
            dimension   = numOutcomes * numFeatures;

            expectation = new double[numOutcomes];
            tempSums    = new double[numOutcomes];
            gradient    = new double[dimension];
        }
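The objective built here is later handed to the quasi-Newton minimizer (see the QNModel example further down). A minimal pairing sketch, where the cost, iteration, update, and evaluation-limit values are assumptions and the monitor is omitted:

    // Sketch: minimize the negative log-likelihood (argument values are assumptions).
    IFunction objective = new NegLogLikelihood(indexer);              // indexer: any completed IDataIndexer
    var minimizer = new QNMinimizer(0.0, 0.0, 100, 15, 30000, null);  // l1Cost, l2Cost, iterations, updates, maxFctEval, monitor
    double[] flatParameters = minimizer.Minimize(objective);          // length = numOutcomes * numFeatures (the "dimension" above)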
Example 3
        /// <summary>
        /// Train a model using the Perceptron algorithm.
        /// </summary>
        /// <param name="iterations">The number of Perceptron iterations to perform.</param>
        /// <param name="indexer">The object which will be used for event compilation.</param>
        /// <param name="cutoff">The number of times a predicate must occur to be used in a model.</param>
        /// <param name="useAverage"></param>
        /// <returns>The newly trained model, which can be used immediately or saved to disk using a <see cref="IO.PerceptronModelWriter"/> object.</returns>
        public AbstractModel TrainModel(int iterations, IDataIndexer indexer, int cutoff, bool useAverage)
        {
            Display("Incorporating indexed data for training...");

            info.Append("Trained using Perceptron algorithm.\n\n");

            // Executes the data indexer
            indexer.Execute();

            contexts           = indexer.GetContexts();
            values             = indexer.Values;
            numTimesEventsSeen = indexer.GetNumTimesEventsSeen();
            numEvents          = indexer.GetNumEvents();
            numUniqueEvents    = contexts.Length;

            outcomeLabels = indexer.GetOutcomeLabels();
            outcomeList   = indexer.GetOutcomeList();

            predLabels  = indexer.GetPredLabels();
            numPreds    = predLabels.Length;
            numOutcomes = outcomeLabels.Length;

            Display("\ndone.\n");

            info.Append("Number of Event Tokens: {0}\n" +
                        "    Number of Outcomes: {1}\n" +
                        "  Number of Predicates: {2}\n", numEvents, numOutcomes, numPreds);

            Display("\tNumber of Event Tokens: " + numUniqueEvents);
            Display("\t    Number of Outcomes: " + numOutcomes);
            Display("\t  Number of Predicates: " + numPreds);

            Display("Computing model parameters.");

            var finalParameters = FindParameters(iterations, useAverage);

            Display("\ndone.\n");

            // ReSharper disable once CoVariantArrayConversion
            return(new PerceptronModel(finalParameters, predLabels, outcomeLabels)
            {
                info = info
            });
        }
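A usage sketch for this trainer; the PerceptronTrainer type name, the prepared indexer, and the argument values are assumptions:

    // Sketch: 100 averaged-perceptron iterations with a predicate cutoff of 5.
    var trainer = new PerceptronTrainer();
    AbstractModel model = trainer.TrainModel(100, indexer, 5, useAverage: true);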
Example 4
        private bool CompareDoubleArray(double[] expected, double[] actual, IDataIndexer indexer, double tolerance)
        {
            var alignedActual = AlignDoubleArrayForTestData(actual, indexer.GetPredLabels(), indexer.GetOutcomeLabels());

            if (expected.Length != alignedActual.Length)
            {
                return(false);
            }

            for (var i = 0; i < alignedActual.Length; i++)
            {
                if (Math.Abs(alignedActual[i] - expected[i]) > tolerance)
                {
                    return(false);
                }
            }

            return(true);
        }
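In a test this helper typically backs an assertion that trained parameters match a stored baseline within a tolerance. A minimal sketch, where the baseline values, the parameter accessor, and the assertion API are assumptions:

    // Sketch: assert trained parameters match a reference within 1e-6 (placeholder values).
    double[] expected = { 0.25, 0.50, 0.25 };             // hypothetical stored baseline
    double[] actual   = ExtractParameters(trainedModel);  // hypothetical helper flattening the model's contexts
    Assert.True(CompareDoubleArray(expected, actual, indexer, 1e-6));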
Example 5
        /// <summary>
        /// Train a model using the GIS algorithm.
        /// </summary>
        /// <param name="iterations">The number of GIS iterations to perform.</param>
        /// <param name="di">The data indexer used to compress events in memory.</param>
        /// <param name="modelPrior">The prior distribution used to train this model.</param>
        /// <param name="modelCutoff">The number of times a feature must occur to be included.</param>
        /// <param name="threads">The number of threads used to train this model.</param>
        /// <returns>The newly trained model, which can be used immediately or saved to disk using a <see cref="GISModelWriter"/> object.</returns>
        public GISModel TrainModel(int iterations, IDataIndexer di, IPrior modelPrior, int modelCutoff, int threads) {

            if (threads <= 0)
                throw new ArgumentOutOfRangeException(nameof(threads), threads, "The number of threads must be at least one.");

            modelExpects = new MutableContext[threads][];

            info.Append("Trained using GIS algorithm.\n\n");

            // Executes the data indexer
            di.Execute();

            // Incorporate all of the needed info.
            Display("Incorporating indexed data for training...");
            contexts = di.GetContexts();
            values = di.Values;
            cutoff = modelCutoff;
            predicateCounts = di.GetPredCounts();
            numTimesEventsSeen = di.GetNumTimesEventsSeen();
            numUniqueEvents = contexts.Length;
            prior = modelPrior;

            // determine the correction constant (the largest total feature value observed for any event)
            double correctionConstant = 0;
            for (int ci = 0; ci < contexts.Length; ci++) {
                if (values == null || values[ci] == null) {
                    if (contexts[ci].Length > correctionConstant) {
                        correctionConstant = contexts[ci].Length;
                    }
                } else {
                    var cl = values[ci][0];
                    for (var vi = 1; vi < values[ci].Length; vi++) {
                        cl += values[ci][vi];
                    }

                    if (cl > correctionConstant) {
                        correctionConstant = cl;
                    }
                }
            }

            Display("done.");

            outcomeLabels = di.GetOutcomeLabels();
            outcomeList = di.GetOutcomeList();
            numOutcomes = outcomeLabels.Length;

            predLabels = di.GetPredLabels();
            prior.SetLabels(outcomeLabels, predLabels);
            numPreds = predLabels.Length;

            info.Append("Number of Event Tokens: {0}\n", numUniqueEvents);
            info.Append("    Number of Outcomes: {0}\n", numOutcomes);
            info.Append("  Number of Predicates: {0}\n", numPreds);

            Display("\tNumber of Event Tokens: " + numUniqueEvents);
            Display("\t    Number of Outcomes: " + numOutcomes);
            Display("\t  Number of Predicates: " + numPreds);

            // set up feature arrays: conceptually a numPreds x numOutcomes count table,
            // realized as a jagged array whose rows are allocated lazily below

            var predCount = new float[numPreds][];

            for (int ti = 0; ti < numUniqueEvents; ti++) {
                for (int j = 0; j < contexts[ti].Length; j++) {

                    if (predCount[contexts[ti][j]] == null) {
                        predCount[contexts[ti][j]] = new float[numOutcomes];
                    }

                    if (values != null && values[ti] != null) {
                        predCount[contexts[ti][j]][outcomeList[ti]] += numTimesEventsSeen[ti]*values[ti][j];
                    } else {
                        predCount[contexts[ti][j]][outcomeList[ti]] += numTimesEventsSeen[ti];
                    }
                }
            }

            // ReSharper disable once RedundantAssignment
            di = null;

            // Get the observed expectations of the features. Strictly speaking,
            // we should divide the counts by the number of Tokens, but because of
            // the way the model's expectations are approximated in the
            // implementation, this is canceled out when we compute the next
            // iteration of a parameter, making the extra divisions wasteful.
            param = new MutableContext[numPreds];
            for (var i = 0; i < modelExpects.Length; i++)
                modelExpects[i] = new MutableContext[numPreds];

            observedExpects = new MutableContext[numPreds];


            // The model does not need the correction constant or the correction feature: the correction
            // constant is only used during training, and the correction feature is not necessary.
            // For compatibility reasons the model from now on contains a correction constant of 1
            // and a correction parameter of 0.
            // ReSharper disable once CoVariantArrayConversion
            evalParams = new EvalParameters(param, 0, 1, numOutcomes);

            var activeOutcomes = new int[numOutcomes];
            var allOutcomesPattern = new int[numOutcomes];
            for (var oi = 0; oi < numOutcomes; oi++) {
                allOutcomesPattern[oi] = oi;
            }
            for (var pi = 0; pi < numPreds; pi++) {
                var numActiveOutcomes = 0;
                int[] outcomePattern;
                if (Smoothing) {
                    numActiveOutcomes = numOutcomes;
                    outcomePattern = allOutcomesPattern;
                } else {
                    //determine active outcomes
                    for (var oi = 0; oi < numOutcomes; oi++) {
                        if (predCount[pi][oi] > 0 && predicateCounts[pi] >= cutoff) {
                            activeOutcomes[numActiveOutcomes] = oi;
                            numActiveOutcomes++;
                        }
                    }
                    if (numActiveOutcomes == numOutcomes) {
                        outcomePattern = allOutcomesPattern;
                    } else {
                        outcomePattern = new int[numActiveOutcomes];
                        for (var aoi = 0; aoi < numActiveOutcomes; aoi++) {
                            outcomePattern[aoi] = activeOutcomes[aoi];
                        }
                    }
                }
                param[pi] = new MutableContext(outcomePattern, new double[numActiveOutcomes]);

                foreach (MutableContext[] me in modelExpects)
                    me[pi] = new MutableContext(outcomePattern, new double[numActiveOutcomes]);

                observedExpects[pi] = new MutableContext(outcomePattern, new double[numActiveOutcomes]);
                for (var aoi = 0; aoi < numActiveOutcomes; aoi++) {
                    var oi = outcomePattern[aoi];
                    param[pi].SetParameter(aoi, 0.0);

                    foreach (var modelExpect in modelExpects) {
                        modelExpect[pi].SetParameter(aoi, 0.0);
                    }

                    if (predCount[pi][oi] > 0) {
                        observedExpects[pi].SetParameter(aoi, predCount[pi][oi]);
                    } else if (Smoothing) {
                        observedExpects[pi].SetParameter(aoi, SmoothingObservation);
                    }
                }
            }

            Display("...done.");

            /***************** Find the parameters ************************/
            if (threads == 1)
                Display("Computing model parameters ...");
            else
                Display("Computing model parameters in " + threads + " threads...");

            FindParameters(iterations, correctionConstant);

            /*************** Create and return the model ******************/

            // To be compatible with old models the correction constant is always 1

            // ReSharper disable once CoVariantArrayConversion
            return new GISModel(param, predLabels, outcomeLabels, 1, evalParams.CorrectionParam) {
                info = TrainingInfo
            };
        }
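A usage sketch for this trainer; GISTrainer and UniformPrior come from the same OpenNLP-derived code base but are assumptions here, as are the argument values:

    // Sketch: 100 GIS iterations, uniform prior, feature cutoff of 5, two worker threads.
    var trainer = new GISTrainer();
    GISModel model = trainer.TrainModel(100, indexer, new UniformPrior(), 5, 2);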
Example 6
        /// <summary>
        /// Train a model using the GIS algorithm.
        /// </summary>
        /// <param name="iterations">The number of GIS iterations to perform.</param>
        /// <param name="di">The data indexer used to compress events in memory.</param>
        /// <param name="modelPrior">The prior distribution used to train this model.</param>
        /// <param name="modelCutoff">The number of times a feature must occur to be included.</param>
        /// <param name="threads">The number of threads used to train this model.</param>
        /// <returns>The newly trained model, which can be used immediately or saved to disk using a <see cref="GISModelWriter"/> object.</returns>
        public GISModel TrainModel(int iterations, IDataIndexer di, IPrior modelPrior, int modelCutoff, int threads)
        {
            if (threads <= 0)
            {
                throw new ArgumentOutOfRangeException(nameof(threads), threads, "The number of threads must be at least one.");
            }

            modelExpects = new MutableContext[threads][];

            info.Append("Trained using GIS algorithm.\n\n");

            // Executes the data indexer
            di.Execute();

            // Incorporate all of the needed info.
            Display("Incorporating indexed data for training...");
            contexts           = di.GetContexts();
            values             = di.Values;
            cutoff             = modelCutoff;
            predicateCounts    = di.GetPredCounts();
            numTimesEventsSeen = di.GetNumTimesEventsSeen();
            numUniqueEvents    = contexts.Length;
            prior = modelPrior;

            // determine the correction constant (the largest total feature value observed for any event)
            double correctionConstant = 0;

            for (int ci = 0; ci < contexts.Length; ci++)
            {
                if (values == null || values[ci] == null)
                {
                    if (contexts[ci].Length > correctionConstant)
                    {
                        correctionConstant = contexts[ci].Length;
                    }
                }
                else
                {
                    var cl = values[ci][0];
                    for (var vi = 1; vi < values[ci].Length; vi++)
                    {
                        cl += values[ci][vi];
                    }

                    if (cl > correctionConstant)
                    {
                        correctionConstant = cl;
                    }
                }
            }

            Display("done.");

            outcomeLabels = di.GetOutcomeLabels();
            outcomeList   = di.GetOutcomeList();
            numOutcomes   = outcomeLabels.Length;

            predLabels = di.GetPredLabels();
            prior.SetLabels(outcomeLabels, predLabels);
            numPreds = predLabels.Length;

            info.Append("Number of Event Tokens: {0}\n", numUniqueEvents);
            info.Append("    Number of Outcomes: {0}\n", numOutcomes);
            info.Append("  Number of Predicates: {0}\n", numPreds);

            Display("\tNumber of Event Tokens: " + numUniqueEvents);
            Display("\t    Number of Outcomes: " + numOutcomes);
            Display("\t  Number of Predicates: " + numPreds);

            // set up feature arrays: conceptually a numPreds x numOutcomes count table,
            // realized as a jagged array whose rows are allocated lazily below

            var predCount = new float[numPreds][];

            for (int ti = 0; ti < numUniqueEvents; ti++)
            {
                for (int j = 0; j < contexts[ti].Length; j++)
                {
                    if (predCount[contexts[ti][j]] == null)
                    {
                        predCount[contexts[ti][j]] = new float[numOutcomes];
                    }

                    if (values != null && values[ti] != null)
                    {
                        predCount[contexts[ti][j]][outcomeList[ti]] += numTimesEventsSeen[ti] * values[ti][j];
                    }
                    else
                    {
                        predCount[contexts[ti][j]][outcomeList[ti]] += numTimesEventsSeen[ti];
                    }
                }
            }

            // ReSharper disable once RedundantAssignment
            di = null;

            // Get the observed expectations of the features. Strictly speaking,
            // we should divide the counts by the number of Tokens, but because of
            // the way the model's expectations are approximated in the
            // implementation, this is canceled out when we compute the next
            // iteration of a parameter, making the extra divisions wasteful.
            param = new MutableContext[numPreds];
            for (var i = 0; i < modelExpects.Length; i++)
            {
                modelExpects[i] = new MutableContext[numPreds];
            }

            observedExpects = new MutableContext[numPreds];


            // The model does not need the correction constant or the correction feature: the correction
            // constant is only used during training, and the correction feature is not necessary.
            // For compatibility reasons the model from now on contains a correction constant of 1
            // and a correction parameter of 0.
            // ReSharper disable once CoVariantArrayConversion
            evalParams = new EvalParameters(param, 0, 1, numOutcomes);

            var activeOutcomes     = new int[numOutcomes];
            var allOutcomesPattern = new int[numOutcomes];

            for (var oi = 0; oi < numOutcomes; oi++)
            {
                allOutcomesPattern[oi] = oi;
            }
            for (var pi = 0; pi < numPreds; pi++)
            {
                var   numActiveOutcomes = 0;
                int[] outcomePattern;
                if (Smoothing)
                {
                    numActiveOutcomes = numOutcomes;
                    outcomePattern    = allOutcomesPattern;
                }
                else
                {
                    //determine active outcomes
                    for (var oi = 0; oi < numOutcomes; oi++)
                    {
                        if (predCount[pi][oi] > 0 && predicateCounts[pi] >= cutoff)
                        {
                            activeOutcomes[numActiveOutcomes] = oi;
                            numActiveOutcomes++;
                        }
                    }
                    if (numActiveOutcomes == numOutcomes)
                    {
                        outcomePattern = allOutcomesPattern;
                    }
                    else
                    {
                        outcomePattern = new int[numActiveOutcomes];
                        for (var aoi = 0; aoi < numActiveOutcomes; aoi++)
                        {
                            outcomePattern[aoi] = activeOutcomes[aoi];
                        }
                    }
                }
                param[pi] = new MutableContext(outcomePattern, new double[numActiveOutcomes]);

                foreach (MutableContext[] me in modelExpects)
                {
                    me[pi] = new MutableContext(outcomePattern, new double[numActiveOutcomes]);
                }

                observedExpects[pi] = new MutableContext(outcomePattern, new double[numActiveOutcomes]);
                for (var aoi = 0; aoi < numActiveOutcomes; aoi++)
                {
                    var oi = outcomePattern[aoi];
                    param[pi].SetParameter(aoi, 0.0);

                    foreach (var modelExpect in modelExpects)
                    {
                        modelExpect[pi].SetParameter(aoi, 0.0);
                    }

                    if (predCount[pi][oi] > 0)
                    {
                        observedExpects[pi].SetParameter(aoi, predCount[pi][oi]);
                    }
                    else if (Smoothing)
                    {
                        observedExpects[pi].SetParameter(aoi, SmoothingObservation);
                    }
                }
            }

            Display("...done.");

            /***************** Find the parameters ************************/
            if (threads == 1)
            {
                Display("Computing model parameters ...");
            }
            else
            {
                Display("Computing model parameters in " + threads + " threads...");
            }

            FindParameters(iterations, correctionConstant);

            /*************** Create and return the model ******************/

            // To be compatible with old models the correction constant is always 1

            // ReSharper disable once CoVariantArrayConversion
            return(new GISModel(param, predLabels, outcomeLabels, 1, evalParams.CorrectionParam)
            {
                info = TrainingInfo
            });
        }
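To make the active-outcome bookkeeping above concrete, here is a toy walk-through of the inner loop with invented counts: a predicate seen only with outcomes 0 and 2 (and no smoothing) ends up with a compressed outcome pattern instead of allOutcomesPattern.

    // Toy illustration of the active-outcome loop above (counts are invented).
    float[] countsForPred = { 3f, 0f, 1f };      // predCount[pi] for one predicate: outcomes 0 and 2 observed
    var activeOutcomes = new int[countsForPred.Length];
    var numActive = 0;
    for (var oi = 0; oi < countsForPred.Length; oi++)
    {
        if (countsForPred[oi] > 0)
        {
            activeOutcomes[numActive++] = oi;     // collects 0, then 2
        }
    }
    // numActive == 2, so outcomePattern becomes { 0, 2 } and parameters /
    // observed expectations are stored only for those two outcomes.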
Example 7
        /// <summary>
        /// Execute the training operation.
        /// </summary>
        /// <param name="iterations">The number of iterations.</param>
        /// <param name="indexer">The data indexer.</param>
        /// <returns>The trained <see cref="IMaxentModel" /> model.</returns>
        /// <exception cref="System.ArgumentOutOfRangeException"><paramref name="iterations"/> is negative.</exception>
        /// <exception cref="System.ArgumentNullException"><paramref name="indexer"/> is <c>null</c>.</exception>
        /// <exception cref="System.InvalidOperationException">The number of threads is invalid.</exception>
        public QNModel TrainModel(int iterations, IDataIndexer indexer)
        {
            if (iterations < 0)
            {
                throw new ArgumentOutOfRangeException(nameof(iterations));
            }

            if (indexer == null)
            {
                throw new ArgumentNullException(nameof(indexer));
            }

            IFunction function;

            if (threads == 1)
            {
                Display("Computing model parameters ...");
                function = new NegLogLikelihood(indexer);
            }
            else if (threads > 1)
            {
                Display("Computing model parameters in " + threads + " threads ...");
                function = new ParallelNegLogLikelihood(indexer, threads);
            }
            else
            {
                throw new InvalidOperationException("The number of threads is invalid.");
            }

            if (!indexer.Completed)
            {
                indexer.Execute();
            }

            var minimizer = new QNMinimizer(l1Cost, l2Cost, iterations, updates, maxFctEval, Monitor)
            {
                Evaluator = new QNModelEvaluator(indexer)
            };

            // minimized parameters
            var mp = minimizer.Minimize(function);

            // construct model with trained parameters

            var predLabels  = indexer.GetPredLabels();
            var nPredLabels = predLabels.Length;

            var outcomeNames = indexer.GetOutcomeLabels();
            var nOutcomes    = outcomeNames.Length;

            var parameters = new Context[nPredLabels];

            for (var ci = 0; ci < parameters.Length; ci++)
            {
                var outcomePattern = new List <int>(nOutcomes);
                var alpha          = new List <double>(nOutcomes);
                for (var oi = 0; oi < nOutcomes; oi++)
                {
                    var val = mp[oi * nPredLabels + ci];
                    outcomePattern.Add(oi);
                    alpha.Add(val);
                }
                parameters[ci] = new Context(outcomePattern.ToArray(), alpha.ToArray());
            }
            return(new QNModel(parameters, predLabels, outcomeNames));
        }
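A usage sketch for the quasi-Newton trainer; the QNTrainer type name, the prepared indexer, and the feature strings are assumptions, and the Eval call assumes the standard IMaxentModel string-context overload:

    // Sketch: train for 100 iterations, then score a context (feature strings are hypothetical).
    var trainer = new QNTrainer();
    QNModel model = trainer.TrainModel(100, indexer);
    double[] outcomeProbabilities = model.Eval(new[] { "w=the", "w.next=cat" });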
Example 8
        /// <summary>
        /// Train a model using the Perceptron algorithm.
        /// </summary>
        /// <param name="iterations">The number of Perceptron iterations to perform.</param>
        /// <param name="indexer">The object which will be used for event compilation.</param>
        /// <param name="cutoff">The number of times a predicate must occur to be used in a model.</param>
        /// <param name="useAverage"></param>
        /// <returns>The newly trained model, which can be used immediately or saved to disk using a <see cref="IO.PerceptronModelWriter"/> object.</returns>
        public AbstractModel TrainModel(int iterations, IDataIndexer indexer, int cutoff, bool useAverage) {
            Display("Incorporating indexed data for training...");

            info.Append("Trained using Perceptron algorithm.\n\n");

            // Executes the data indexer
            indexer.Execute();

            contexts = indexer.GetContexts();
            values = indexer.Values;
            numTimesEventsSeen = indexer.GetNumTimesEventsSeen();
            numEvents = indexer.GetNumEvents();
            numUniqueEvents = contexts.Length;

            outcomeLabels = indexer.GetOutcomeLabels();
            outcomeList = indexer.GetOutcomeList();

            predLabels = indexer.GetPredLabels();
            numPreds = predLabels.Length;
            numOutcomes = outcomeLabels.Length;

            Display("\ndone.\n");

            info.Append("Number of Event Tokens: {0}\n" +
                        "    Number of Outcomes: {1}\n" +
                        "  Number of Predicates: {2}\n", numEvents, numOutcomes, numPreds);

            Display("\tNumber of Event Tokens: " + numUniqueEvents);
            Display("\t    Number of Outcomes: " + numOutcomes);
            Display("\t  Number of Predicates: " + numPreds);

            Display("Computing model parameters.");

            var finalParameters = FindParameters(iterations, useAverage);

            Display("\ndone.\n");

            // ReSharper disable once CoVariantArrayConversion
            return new PerceptronModel(finalParameters, predLabels, outcomeLabels) {
                info = info
            };
        }
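The doc comment points to IO.PerceptronModelWriter for persistence. A minimal sketch of that step, where the writer's constructor overload and the Persist method are assumptions based on the doc-comment reference:

    // Sketch: persist the trained model (writer constructor/Persist are assumed, not confirmed by the listing).
    var model = (PerceptronModel) trainer.TrainModel(100, indexer, 5, true);
    using (var stream = File.OpenWrite("perceptron.model"))
    using (var writer = new IO.PerceptronModelWriter(model, stream))
    {
        writer.Persist();
    }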