Example No. 1
        protected AbstractModel(Context[] parameters, IndexHashTable<string> map, string[] outcomeNames)
        {
            this.map          = map;
            this.outcomeNames = outcomeNames;

            evalParameters = new EvalParameters(parameters, outcomeNames.Length);
        }
Example No. 2
 protected AbstractModel(
     Context[] parameters,
     string[] predLabels,
     string[] outcomeNames,
     int correctionConstant,
     double correctionParam) : this(predLabels, outcomeNames) {
     evalParameters = new EvalParameters(parameters, correctionParam, correctionConstant, outcomeNames.Length);
 }
Example No. 3
 protected AbstractModel(
     Context[] parameters,
     string[] predLabels,
     string[] outcomeNames,
     int correctionConstant,
     double correctionParam) : this(predLabels, outcomeNames)
 {
     evalParameters = new EvalParameters(parameters, correctionParam, correctionConstant, outcomeNames.Length);
 }
Example No. 4
        /// <summary>
        /// Use this model to evaluate a context and return an array of the likelihood of each outcome given the specified context and the specified parameters.
        /// </summary>
        /// <param name="context">The integer values of the predicates which have been observed at the present decision point.</param>
        /// <param name="values">The values associated with each of the active predicates in <paramref name="context"/>, or <c>null</c> to treat every active predicate as having the value 1.</param>
        /// <param name="prior">The prior distribution for the specified context.</param>
        /// <param name="evalParams">The set of parameters used in this computation.</param>
        /// <returns>
        /// The normalized probabilities for the outcomes given the context.
        /// The indexes of the double[] are the outcome ids, and the actual
        /// string representation of the outcomes can be obtained from the
        /// method GetOutcome(int).
        /// </returns>
        public static double[] Eval(int[] context, float[] values, double[] prior, EvalParameters evalParams) {

            var numfeats = new int[evalParams.NumOutcomes];

            double value = 1;
            for (int ci = 0; ci < context.Length; ci++) {

                if (context[ci] >= 0) {
                    var activeParameters = evalParams.Parameters[context[ci]].Parameters;
                    if (values != null) {
                        value = values[ci];
                    }
                    for (int ai = 0; ai < evalParams.Parameters[context[ci]].Outcomes.Length; ai++) {
                        int oid = evalParams.Parameters[context[ci]].Outcomes[ai];
                        numfeats[oid]++;
                        prior[oid] += activeParameters[ai]*value;
                    }
                }
            }

            double normal = 0.0;
            for (int oid = 0; oid < evalParams.NumOutcomes; oid++) {
                if (!evalParams.CorrectionParam.Equals(0d)) {
                    //prior[oid] = Math.Exp(prior[oid] * model.ConstantInverse + ((1.0 - (numfeats[oid]/model.CorrectionConstant)) * model.CorrectionParam));
                    prior[oid] = Math.Exp(prior[oid] * evalParams.ConstantInverse + ((1.0 - (numfeats[oid] / evalParams.CorrectionConstant)) * evalParams.CorrectionParam));
                } else {
                    prior[oid] = Math.Exp(prior[oid] * evalParams.ConstantInverse);
                }
                normal += prior[oid];
            }

            for (int oid = 0; oid < evalParams.NumOutcomes; oid++) {
                prior[oid] /= normal;
            }

            return prior;
        }
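
The loop above accumulates the parameters of each active predicate into prior, then exponentiates and normalizes, applying the GIS correction feature only when CorrectionParam is non-zero. Below is a minimal, hypothetical usage sketch (Eval refers to the static overload shown above); the Context(int[] outcomes, double[] parameters) constructor signature and the zero log-prior are assumptions for illustration, while the four-argument EvalParameters constructor follows the argument order visible in Example No. 2.

// Hypothetical usage sketch (constructor signatures are assumptions, see the note above).
var parameters = new[] {
    new Context(new[] { 0, 1 }, new[] { 0.5, -0.25 }), // predicate 0 contributes to outcomes 0 and 1
    new Context(new[] { 1 },    new[] { 1.0 })         // predicate 1 contributes to outcome 1 only
};

// correctionParam = 0 and correctionConstant = 1 effectively disable the GIS correction term.
var evalParams = new EvalParameters(parameters, 0d, 1, 2);

var context = new[] { 0, 1 };   // both predicates are active at this decision point
var prior   = new double[2];    // zero log-prior, i.e. a uniform prior
var probs   = Eval(context, null, prior, evalParams);
// probs[oid] is the normalized probability of outcome oid; probs is the same array instance as prior.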
Example No. 5
 /// <summary>
 /// Use this model to evaluate a context and return an array of the likelihood of each outcome given the specified context and the specified parameters.
 /// </summary>
 /// <param name="context">The integer values of the predicates which have been observed at the present decision point.</param>
 /// <param name="prior">The prior distribution for the specified context.</param>
 /// <param name="evalParams">The set of parameters used in this computation.</param>
 /// <returns>
 /// The normalized probabilities for the outcomes given the context.
 /// The indexes of the double[] are the outcome ids, and the actual
 /// string representation of the outcomes can be obtained from the
 /// method GetOutcome(int).
 /// </returns>
 public static double[] Eval(int[] context, double[] prior, EvalParameters evalParams) {
     return Eval(context, null, prior, evalParams);
 }
Example No. 6
        /// <summary>
        /// Use this model to evaluate a context and return an array of the likelihood of each outcome given that context.
        /// </summary>
        /// <param name="context">The integer values of the predicates which have been observed at the present decision point.</param>
        /// <param name="values">The values associated with each of the active predicates in <paramref name="context"/>, or <c>null</c> to treat every active predicate as having the value 1.</param>
        /// <param name="prior">The prior distribution for the specified context; the per-outcome scores are accumulated into this array.</param>
        /// <param name="evalParams">The set of parameters used in this computation.</param>
        /// <param name="normalize">if set to <c>true</c> the probabilities will be normalized.</param>
        /// <returns>The probabilities for the outcomes given the context, normalized when <paramref name="normalize"/> is <c>true</c>.
        /// The indexes of the double[] are the outcome ids, and the actual string representation of 
        /// the outcomes can be obtained from the method GetOutcome(int).</returns>
        public static double[] Eval(int[] context, float[] values, double[] prior, EvalParameters evalParams, bool normalize) {
            double value = 1;
            for (var ci = 0; ci < context.Length; ci++) {
                if (context[ci] >= 0) {
                    var predParams = evalParams.Parameters[context[ci]];
                    var activeOutcomes = predParams.Outcomes;
                    var activeParameters = predParams.Parameters;
                    if (values != null) {
                        value = values[ci];
                    }
                    for (var ai = 0; ai < activeOutcomes.Length; ai++) {
                        var oid = activeOutcomes[ai];
                        prior[oid] += activeParameters[ai]*value;
                    }
                }
            }

            if (!normalize) 
                return prior;

            var numOutcomes = evalParams.NumOutcomes;

            double maxPrior = 1;

            for (var oid = 0; oid < numOutcomes; oid++) {
                if (maxPrior < Math.Abs(prior[oid]))
                    maxPrior = Math.Abs(prior[oid]);
            }

            var normal = 0.0;
            for (var oid = 0; oid < numOutcomes; oid++) {
                prior[oid] = Math.Exp(prior[oid]/maxPrior);
                normal += prior[oid];
            }

            for (var oid = 0; oid < numOutcomes; oid++)
                prior[oid] /= normal;

            return prior;
        }
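
When normalize is false this overload returns the raw per-outcome linear scores, which is how the perceptron trainer in Example No. 10 calls it; when true it divides each score by the largest absolute score before exponentiating, a guard against overflow. A short hypothetical call, with the same assumed Context constructor as in the earlier sketch:

// Hypothetical scores-only evaluation (Context constructor signature assumed, as before).
var evalParams = new EvalParameters(new[] {
    new Context(new[] { 0, 1 }, new[] { 0.5, -0.25 })
}, 2);

var scores = new double[evalParams.NumOutcomes];
Eval(new[] { 0 }, null, scores, evalParams, false); // raw scores, no normalization
// The index of the largest entry in scores is the predicted outcome id.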
Example No. 7
 protected AbstractModel(Context[] parameters, string[] predLabels, string[] outcomeNames)
     : this(predLabels, outcomeNames)
 {
     evalParameters = new EvalParameters(parameters, outcomeNames.Length);
 }
Example No. 8
 protected AbstractModel(Context[] parameters, string[] predLabels, IndexHashTable<string> map, string[] outcomeNames) {
     this.map = map;
     this.outcomeNames = outcomeNames;
     evalParameters = new EvalParameters(parameters, outcomeNames.Length);
 }
Example No. 9
 protected AbstractModel(Context[] parameters, string[] predLabels, string[] outcomeNames)
     : this(predLabels, outcomeNames) {
     evalParameters = new EvalParameters(parameters, outcomeNames.Length);
 }
Example No. 10
        private MutableContext[] FindParameters(int iterations, bool useAverage) {

            info.Append("  Number of Iterations: {0}\n", iterations);

            Display("\nPerforming " + iterations + " iterations.\n");

            var allOutcomesPattern = new int[numOutcomes];
            for (var oi = 0; oi < numOutcomes; oi++)
                allOutcomesPattern[oi] = oi;

            // Stores the estimated parameter value of each predicate during iteration.
            var param = new MutableContext[numPreds];
            for (var pi = 0; pi < numPreds; pi++) {
                param[pi] = new MutableContext(allOutcomesPattern, new double[numOutcomes]);
                for (var aoi = 0; aoi < numOutcomes; aoi++)
                    param[pi].SetParameter(aoi, 0.0);
            }

            // ReSharper disable once CoVariantArrayConversion
            var evalParams = new EvalParameters(param, numOutcomes);

            // Stores the sum of parameter values of each predicate over many iterations.
            var summedParams = new MutableContext[numPreds];
            if (useAverage) {
                for (var pi = 0; pi < numPreds; pi++) {
                    summedParams[pi] = new MutableContext(allOutcomesPattern, new double[numOutcomes]);
                    for (var aoi = 0; aoi < numOutcomes; aoi++)
                        summedParams[pi].SetParameter(aoi, 0.0);
                }
            }

            // Keep track of the previous three accuracies. The difference of
            // the mean of these and the current training set accuracy is used
            // with tolerance to decide whether to stop.
            var prevAccuracy1 = 0.0;
            var prevAccuracy2 = 0.0;
            var prevAccuracy3 = 0.0;

            // A counter for the denominator for averaging.
            var numTimesSummed = 0;

            double stepSize = 1;
            for (var i = 1; i <= iterations; i++) {
                // Decrease the step size by a small amount.
                if (stepSizeDecrease > 0)
                    stepSize *= 1 - stepSizeDecrease;


                if (Monitor != null && Monitor.Token.CanBeCanceled)
                    Monitor.Token.ThrowIfCancellationRequested();

                var numCorrect = 0;

                for (var ei = 0; ei < numUniqueEvents; ei++) {
                    var targetOutcome = outcomeList[ei];

                    for (var ni = 0; ni < numTimesEventsSeen[ei]; ni++) {
                        // Compute the model's prediction according to the current parameters.
                        var modelDistribution = new double[numOutcomes];

                        PerceptronModel.Eval(
                            contexts[ei], 
                            values != null ? values[ei] : null, 
                            modelDistribution,
                            evalParams, false);

                        var maxOutcome = MaxIndex(modelDistribution);

                        // If the predicted outcome is different from the target
                        // outcome, do the standard update: boost the parameters
                        // associated with the target and reduce those associated
                        // with the incorrect predicted outcome.
                        if (maxOutcome != targetOutcome) {
                            for (var ci = 0; ci < contexts[ei].Length; ci++) {
                                var pi = contexts[ei][ci];
                                if (values == null) {
                                    param[pi].UpdateParameter(targetOutcome, stepSize);
                                    param[pi].UpdateParameter(maxOutcome, -stepSize);
                                } else {
                                    param[pi].UpdateParameter(targetOutcome, stepSize*values[ei][ci]);
                                    param[pi].UpdateParameter(maxOutcome, -stepSize*values[ei][ci]);
                                }
                            }
                        }

                        // Update the counts for accuracy.
                        if (maxOutcome == targetOutcome)
                            numCorrect++;
                    }
                }

                // Calculate the training accuracy and display.
                var trainingAccuracy = (double) numCorrect/numEvents;
                Display(string.Format("{0,-4} {1} of {2} - {3}",
                    i,
                    numCorrect,
                    numEvents,
                    trainingAccuracy));
                    

                // TODO: Make averaging configurable !!!

                // NOTE: as written, the skipped-averaging branch is subsumed by the
                // "else if (useAverage)" branch, so averaging is applied on every
                // iteration whenever useAverage is true.
                bool doAveraging;

                if (useAverage && UseSkippedAveraging && (i < 20 || IsPerfectSquare(i))) {
                    doAveraging = true;
                } else if (useAverage) {
                    doAveraging = true;
                } else {
                    doAveraging = false;
                }

                if (doAveraging) {
                    numTimesSummed++;
                    for (var pi = 0; pi < numPreds; pi++)
                        for (var aoi = 0; aoi < numOutcomes; aoi++)
                            summedParams[pi].UpdateParameter(aoi, param[pi].Parameters[aoi]);
                }

                // If the tolerance is greater than the difference between the
                // current training accuracy and all of the previous three
                // training accuracies, stop training.
                if (Math.Abs(prevAccuracy1 - trainingAccuracy) < tolerance &&
                    Math.Abs(prevAccuracy2 - trainingAccuracy) < tolerance && 
                    Math.Abs(prevAccuracy3 - trainingAccuracy) < tolerance) {

                    Display("Stopping: change in training set accuracy less than " + tolerance + "\n");
                    break;
                }

                // Update the previous training accuracies.
                prevAccuracy1 = prevAccuracy2;
                prevAccuracy2 = prevAccuracy3;
                prevAccuracy3 = trainingAccuracy;
            }

            // Output the final training stats.
            TrainingStats(evalParams);

            
            if (!useAverage) 
                return param;

            if (numTimesSummed == 0) // Improbable, but flagged as possible by Coverity.
                numTimesSummed = 1;

            // Create averaged parameters
            for (var pi = 0; pi < numPreds; pi++)
                for (var aoi = 0; aoi < numOutcomes; aoi++)
                    summedParams[pi].SetParameter(aoi, summedParams[pi].Parameters[aoi]/numTimesSummed);

            return summedParams;
        }
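
The inner update in FindParameters is the standard multiclass perceptron rule spelled out in the comments: on a misprediction, the weights tied to the target outcome are boosted and those tied to the predicted outcome are reduced by stepSize, scaled by the feature value when real-valued features are present; the averaged variant additionally sums the weight vectors across iterations and divides by the number of summations at the end. A self-contained toy version of that update rule, as a sketch rather than this trainer's actual API:

// Toy multiclass perceptron update, for illustration only; the real trainer keeps its
// weights in MutableContext objects rather than a flat [predicate, outcome] matrix.
static void PerceptronUpdate(double[,] weights, int[] activePreds, float[] values,
                             int target, int predicted, double stepSize) {
    if (predicted == target)
        return;                                        // correct prediction: no change
    for (var ci = 0; ci < activePreds.Length; ci++) {
        var v = values != null ? values[ci] : 1.0;     // binary features default to 1
        weights[activePreds[ci], target]    += stepSize * v;  // boost the true outcome
        weights[activePreds[ci], predicted] -= stepSize * v;  // penalize the wrong prediction
    }
}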
Example No. 11
        private void TrainingStats(EvalParameters evalParams) {
            var numCorrect = 0;

            for (var ei = 0; ei < numUniqueEvents; ei++) {
                for (var ni = 0; ni < numTimesEventsSeen[ei]; ni++) {
                    var modelDistribution = new double[numOutcomes];

                    PerceptronModel.Eval(
                        contexts[ei], 
                        values != null ? values[ei] : null, 
                        modelDistribution, 
                        evalParams,
                        false);

                    var max = MaxIndex(modelDistribution);
                    if (max == outcomeList[ei])
                        numCorrect++;
                }
            }
            var trainingAccuracy = (double) numCorrect/numEvents;

            info.Append("        Correct Events: {0}\n" +
                        "          Total Events: {1}\n" +
                        "              Accuracy: {2}\n", numCorrect, numEvents, trainingAccuracy);

            Display("\nPerceptron training complete:\n");
            Display("\t Correct Events : " + numCorrect);
            Display("\t   Total Events : " + numEvents);
            Display("\t       Accuracy : " + trainingAccuracy);            
        }