Example #1
        private void TrainingStats(EvalParameters evalParams)
        {
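            // Evaluate the current parameters on every training event (each unique
            // event weighted by how many times it was seen) and report how many
            // events are classified correctly.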
            var numCorrect = 0;

            for (var ei = 0; ei < numUniqueEvents; ei++)
            {
                for (var ni = 0; ni < numTimesEventsSeen[ei]; ni++)
                {
                    var modelDistribution = new double[numOutcomes];

                    PerceptronModel.Eval(
                        contexts[ei],
                        values?[ei],
                        modelDistribution,
                        evalParams,
                        false);

                    var max = MaxIndex(modelDistribution);
                    if (max == outcomeList[ei])
                    {
                        numCorrect++;
                    }
                }
            }
            var trainingAccuracy = (double)numCorrect / numEvents;

            info.Append("        Correct Events: {0}\n" +
                        "          Total Events: {1}\n" +
                        "              Accuracy: {2}\n", numCorrect, numEvents, trainingAccuracy);

            Display("\nPerceptron training complete:\n");
            Display("\t Correct Events : " + numCorrect);
            Display("\t   Total Events : " + numEvents);
            Display("\t       Accuracy : " + trainingAccuracy);
        }
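
Both examples call a MaxIndex helper that is not shown here. A minimal sketch, assuming it simply returns the index of the highest-scoring outcome and keeps the earlier index on ties:

        private static int MaxIndex(double[] values)
        {
            // Index of the largest value; the earlier index wins on ties.
            var max = 0;
            for (var i = 1; i < values.Length; i++)
            {
                if (values[i] > values[max])
                {
                    max = i;
                }
            }
            return max;
        }
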
Example #2
        private MutableContext[] FindParameters(int iterations, bool useAverage)
        {
            info.Append("  Number of Iterations: {0}\n", iterations);

            Display("\nPerforming " + iterations + " iterations.\n");

            var allOutcomesPattern = new int[numOutcomes];

            for (var oi = 0; oi < numOutcomes; oi++)
            {
                allOutcomesPattern[oi] = oi;
            }

            /* Stores the estimated parameter value of each predicate during iteration. */
            var param = new MutableContext[numPreds];

            for (var pi = 0; pi < numPreds; pi++)
            {
                param[pi] = new MutableContext(allOutcomesPattern, new double[numOutcomes]);
                for (var aoi = 0; aoi < numOutcomes; aoi++)
                {
                    param[pi].SetParameter(aoi, 0.0);
                }
            }

            // ReSharper disable once CoVariantArrayConversion
            var evalParams = new EvalParameters(param, numOutcomes);

            // Stores the sum of parameter values of each predicate over many iterations.
            var summedParams = new MutableContext[numPreds];

            if (useAverage)
            {
                for (var pi = 0; pi < numPreds; pi++)
                {
                    summedParams[pi] = new MutableContext(allOutcomesPattern, new double[numOutcomes]);
                    for (var aoi = 0; aoi < numOutcomes; aoi++)
                    {
                        summedParams[pi].SetParameter(aoi, 0.0);
                    }
                }
            }

            // Keep track of the previous three accuracies. If the current training
            // set accuracy differs from each of them by less than the tolerance,
            // training stops.
            var prevAccuracy1 = 0.0;
            var prevAccuracy2 = 0.0;
            var prevAccuracy3 = 0.0;

            // A counter for the denominator for averaging.
            var numTimesSummed = 0;

            double stepSize = 1;

            for (var i = 1; i <= iterations; i++)
            {
                // Decrease the step size by a small amount.
                if (stepSizeDecrease > 0)
                {
                    stepSize *= 1 - stepSizeDecrease;
                }


                if (Monitor != null && Monitor.Token.CanBeCanceled)
                {
                    Monitor.Token.ThrowIfCancellationRequested();
                }

                var numCorrect = 0;

                for (var ei = 0; ei < numUniqueEvents; ei++)
                {
                    var targetOutcome = outcomeList[ei];

                    for (var ni = 0; ni < numTimesEventsSeen[ei]; ni++)
                    {
                        // Compute the model's prediction according to the current parameters.
                        var modelDistribution = new double[numOutcomes];

                        PerceptronModel.Eval(
                            contexts[ei],
                            values != null ? values[ei] : null,
                            modelDistribution,
                            evalParams, false);

                        var maxOutcome = MaxIndex(modelDistribution);

                        // If the predicted outcome is different from the target
                        // outcome, do the standard update: boost the parameters
                        // associated with the target and reduce those associated
                        // with the incorrect predicted outcome.
                        if (maxOutcome != targetOutcome)
                        {
                            for (var ci = 0; ci < contexts[ei].Length; ci++)
                            {
                                var pi = contexts[ei][ci];
                                if (values == null)
                                {
                                    param[pi].UpdateParameter(targetOutcome, stepSize);
                                    param[pi].UpdateParameter(maxOutcome, -stepSize);
                                }
                                else
                                {
                                    param[pi].UpdateParameter(targetOutcome, stepSize * values[ei][ci]);
                                    param[pi].UpdateParameter(maxOutcome, -stepSize * values[ei][ci]);
                                }
                            }
                        }

                        // Update the counts for accuracy.
                        if (maxOutcome == targetOutcome)
                        {
                            numCorrect++;
                        }
                    }
                }

                // Calculate the training accuracy and display.
                var trainingAccuracy = (double)numCorrect / numEvents;
                Display($"{i,-4} {numCorrect} of {numEvents} - {trainingAccuracy}");


                // TODO: Make averaging configurable !!!
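                // Note: because of the else-if branch below, doAveraging ends up equal
                // to useAverage, so the skipped-averaging condition
                // (i < 20 || IsPerfectSquare(i)) never actually excludes an iteration.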

                bool doAveraging;

                if (useAverage && UseSkippedAveraging && (i < 20 || IsPerfectSquare(i)))
                {
                    doAveraging = true;
                }
                else if (useAverage)
                {
                    doAveraging = true;
                }
                else
                {
                    doAveraging = false;
                }

                if (doAveraging)
                {
                    numTimesSummed++;
                    for (var pi = 0; pi < numPreds; pi++)
                    {
                        for (var aoi = 0; aoi < numOutcomes; aoi++)
                        {
                            summedParams[pi].UpdateParameter(aoi, param[pi].Parameters[aoi]);
                        }
                    }
                }

                // If the difference between the current training accuracy and each
                // of the previous three accuracies is smaller than the tolerance,
                // stop training.
                if (Math.Abs(prevAccuracy1 - trainingAccuracy) < tolerance &&
                    Math.Abs(prevAccuracy2 - trainingAccuracy) < tolerance &&
                    Math.Abs(prevAccuracy3 - trainingAccuracy) < tolerance)
                {
                    Display("Stopping: change in training set accuracy less than " + tolerance + "\n");
                    break;
                }

                // Update the previous training accuracies.
                prevAccuracy1 = prevAccuracy2;
                prevAccuracy2 = prevAccuracy3;
                prevAccuracy3 = trainingAccuracy;
            }

            // Output the final training stats.
            TrainingStats(evalParams);


            if (!useAverage)
            {
                return param;
            }

            if (numTimesSummed == 0) // Unlikely, but flagged as possible by Coverity; guards the division below.
            {
                numTimesSummed = 1;
            }

            // Create averaged parameters
            for (var pi = 0; pi < numPreds; pi++)
            {
                for (var aoi = 0; aoi < numOutcomes; aoi++)
                {
                    summedParams[pi].SetParameter(aoi, summedParams[pi].Parameters[aoi] / numTimesSummed);
                }
            }

            return summedParams;
        }
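
Example #2 also relies on an IsPerfectSquare helper for the skipped-averaging check; it is not shown above. A minimal sketch, assuming it merely tests whether the truncated square root of n squares back to n:

        private static bool IsPerfectSquare(int n)
        {
            // n is a perfect square when its truncated square root squares back to n.
            var root = (int)Math.Sqrt(n);
            return root * root == n;
        }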