public List <CategoricalDistribution> DoForwardBackward(object owner, IContextLookup globalVars)
        {
            // Build the transition model first; its random variables seed the sensor model below.
            var transitionVariables = TemporalModel.TransitionModel.GetRandomVariables(owner, globalVars);
            var transitionModel     = new FiniteBayesModel(TemporalModel.TransitionModel.GetNetwork(transitionVariables));

            // The sensor model extends the same variable set (the transition variables are passed in).
            var allVariables = TemporalModel.SensorModel.GetRandomVariables(owner, globalVars, transitionVariables);
            var sensorModel  = new FiniteBayesModel(TemporalModel.SensorModel.GetNetwork(allVariables));

            var algorithm = new ForwardBackward(transitionModel, TemporalModel.GetReverseTemporalMap(allVariables), sensorModel);

            // Translate each time step's evidence propositions into the nested java
            // collection shape expected by the underlying implementation.
            var evidenceSteps = new java.util.ArrayList(Evidences.Count);
            foreach (List <PropositionInfo> step in Evidences)
            {
                var stepList = new java.util.ArrayList(step.Count);
                foreach (PropositionInfo info in step)
                {
                    stepList.add(info.GetProposition(owner, globalVars, allVariables));
                }
                evidenceSteps.add(stepList);
            }

            CategoricalDistribution initialBelief = Prior.GetProbabilityTable(allVariables);

            // Run smoothing, then convert the resulting java list back into a typed .NET list.
            return(algorithm.forwardBackward(evidenceSteps, initialBelief).toArray().Select(o => (CategoricalDistribution)o).ToList());
        }
Beispiel #2
0
 /// <summary>
 /// Creates a new model with the specified parameters, outcome names, and
 /// predicate/feature labels.
 /// </summary>
 /// <param name="parameters">
 ///          The parameters of the model. </param>
 /// <param name="predLabels">
 ///          The names of the predicates used in this model. </param>
 /// <param name="outcomeNames">
 ///          The names of the outcomes this model predicts. </param>
 /// <param name="correctionConstant">
 ///          The maximum number of active features which occur in an event. </param>
 /// <param name="correctionParam">
 ///          The parameter associated with the correction feature. </param>
 /// <param name="prior">
 ///          The prior to be used with this model. </param>
 public GISModel(Context[] parameters, string[] predLabels, string[] outcomeNames, int correctionConstant,
                 double correctionParam, Prior prior)
     : base(parameters, predLabels, outcomeNames, correctionConstant, correctionParam)
 {
     this.prior = prior;
     // The prior must know the outcome/predicate label spaces before it is queried.
     prior.setLabels(outcomeNames, predLabels);
     modelType = ModelTypeEnum.Maxent;
 }
Beispiel #3
0
        public void DoSomethingThatTriggersPriorEvents(int someValue)
        {
            // Even values map to Moderate, odd values to Terminal.
            Prior selected = someValue % 2 == 0 ? Prior.Moderate : Prior.Terminal;

            // Notify every subscriber registered for the selected priority.
            var subscribers = _subscribersByPrior[selected];
            foreach (var notify in subscribers)
            {
                notify();
            }
        }
 internal MathematicianName(IEnumerable <MathematicianName> priorNames, string firstName, string lastName)
 {
     // Snapshot the prior-name chain so later mutation of the source sequence cannot affect us.
     Prior     = priorNames.ToList();
     FirstName = firstName;
     LastName  = lastName;

     // The hash chains the first name, the last name, and every prior name's hash, in that order.
     var nameDigest = FirstName.Sha256Hash()
                      .Concatenate(LastName.Sha256Hash());
     HashCode = nameDigest
                .Concatenate(Prior.Select(x => x.HashCodeInt).ToArray())
                .ToByteArray();

     // Defer the big-integer conversion until first use.
     _hashCodeInt = new Lazy <BigInteger>(ComputeHashCodeInt);
 }
Beispiel #5
0
        /// <summary>
        /// Evaluates the kernel of a single point against every basis point.
        /// </summary>
        /// <param name="x">Input point</param>
        /// <returns>Vector of kernel values, one per basis point</returns>
        public Vector KernelOf_X_B(Vector x)
        {
            IList <Vector> basisPoints = Basis;
            int            count       = NumberBasisPoints;
            Vector         kernel      = Vector.Zero(count);

            // One covariance evaluation per basis point, under the prior's kernel.
            for (int idx = 0; idx < count; idx++)
            {
                kernel[idx] = Prior.Covariance(x, basisPoints[idx]);
            }
            return(kernel);
        }
    public void DoSomethingThatTriggersPriorEvents(int someValue)
    {
        // Even values select Moderate, odd values select Terminal.
        Prior targetPrior = someValue % 2 == 0 ? Prior.Moderate : Prior.Terminal;

        // Apply only the scripts registered for the selected priority.
        foreach (var candidate in _scripts)
        {
            if (candidate.Prior != targetPrior)
            {
                continue;
            }
            candidate.Apply();
        }
    }
Beispiel #7
0
        /// <summary>
        /// Evaluates the kernel of a list of points against every basis point.
        /// </summary>
        /// <param name="XList">List of input points</param>
        /// <returns>Matrix of kernel values: one row per input, one column per basis point</returns>
        public Matrix KernelOf_X_B(IList <Vector> XList)
        {
            int    rows   = XList.Count;
            int    cols   = Basis.Count;
            Matrix kernel = new Matrix(rows, cols);

            // kernel[r, c] = Covariance(XList[r], Basis[c]) under the prior's kernel.
            for (int r = 0; r < rows; r++)
            {
                Vector point = XList[r];
                for (int c = 0; c < cols; c++)
                {
                    kernel[r, c] = Prior.Covariance(point, Basis[c]);
                }
            }
            return(kernel);
        }
        /// <summary>
        /// Queries the given probability model according to <see cref="QueryType"/>.
        /// </summary>
        /// <param name="owner">Owner object used when resolving propositions.</param>
        /// <param name="globalVars">Context lookup used when resolving propositions.</param>
        /// <param name="randomVariables">Random variables of the model, keyed by name.</param>
        /// <param name="model">The finite probability model to query.</param>
        /// <returns>The distribution answering the configured query.</returns>
        /// <exception cref="ArgumentOutOfRangeException">If <see cref="QueryType"/> has an unsupported value.</exception>
        public CategoricalDistribution AskBayesianModel(object owner, IContextLookup globalVars, IDictionary <string, RandomVariable> randomVariables, FiniteProbabilityModel model)
        {
            // Materialize the query once: the deferred LINQ query would otherwise
            // re-resolve every proposition on each enumeration below.
            var priorProps = Prior.Select(objProp => objProp.GetProposition(owner, globalVars, randomVariables)).ToArray();

            switch (QueryType)
            {
            case BayesianQueryType.Prior:
                return(model.priorDistribution(priorProps));

            case BayesianQueryType.Posterior:
                var posteriorProps = Posteriors.Select(objProp => objProp.GetProposition(owner, globalVars, randomVariables)).ToArray();
                // Posterior queries condition the first prior proposition on the evidence.
                return(model.posteriorDistribution(priorProps[0], posteriorProps));

            case BayesianQueryType.Joint:
                return(model.jointDistribution(priorProps));

            default:
                throw new ArgumentOutOfRangeException(nameof(QueryType), QueryType, "Unsupported Bayesian query type.");
            }
        }
Beispiel #9
0
        // Places a "cube" family instance at the origin and copies this object's
        // fields into the instance's parameters via the setP helper.
        public void Create(Document doc)
        {
            // Locate the family symbol named "cube" in the document.
            // NOTE(review): First() throws if no such symbol exists, and the `as` cast
            // would yield null for a non-FamilySymbol — presumably the symbol is
            // guaranteed to exist; confirm against the loading code.
            FamilySymbol neocube = new FilteredElementCollector(doc).OfClass(typeof(FamilySymbol)).Where(q => q.Name == "cube").First() as FamilySymbol;

            // A family symbol must be activated before instances of it can be placed.
            if (!neocube.IsActive)
            {
                neocube.Activate();
            }
            // Place a non-structural instance at the origin.
            FamilyInstance unit = doc.Create.NewFamilyInstance(new XYZ(), neocube, StructuralType.NonStructural);

            // Copy this object's output fields into the instance's parameters.
            unit.setP("g_pos", out_Pos);
            unit.setP("g_gost", out_Gost);
            unit.setP("g_name", out_Name);
            unit.setP("g_num", out_Kol_vo);
            unit.setP("g_mass", out_Mass);
            unit.setP("g_other", out_Other);
            unit.setP("g_group", out_Group);
            unit.setP("g_snos", out_Snos);
            //unit.LookupParameter("g_sort").Set(this.Prior).ToString();
            unit.setP("g_sort", Prior.ToString());
        }
Beispiel #10
0
 /// <summary>
 /// Detaches <paramref name="action"/> from the subscriber list kept for
 /// <paramref name="prior"/>. A no-op when the action was never subscribed.
 /// </summary>
 public void Unsubscribe(Prior prior, Action action)
 {
     // List<T>.Remove silently returns false for an unknown action; that result is ignored.
     _subscribersByPrior[prior].Remove(action);
 }
Beispiel #11
0
 /// <summary>
 /// Registers <paramref name="action"/> to be invoked for events of the given
 /// <paramref name="prior"/>.
 /// </summary>
 public void Subscribe(Prior prior, Action action)
 {
     // Duplicate subscriptions are not filtered; subscribing twice invokes twice.
     _subscribersByPrior[prior].Add(action);
 }
Beispiel #12
0
        /// <summary>
        /// Train a model using the GIS algorithm.
        /// </summary>
        /// <param name="iterations">  The number of GIS iterations to perform. </param>
        /// <param name="di"> The data indexer used to compress events in memory. </param>
        /// <param name="modelPrior"> The prior distribution used to train this model. </param>
        /// <param name="cutoff"> Minimum number of times a predicate must occur for its
        ///         outcomes to be treated as active. </param>
        /// <param name="threads"> Number of worker threads used when computing model
        ///         expectations; must be positive. </param>
        /// <returns> The newly trained model, which can be used immediately or saved
        ///         to disk using an opennlp.maxent.io.GISModelWriter object. </returns>
        /// <exception cref="System.ArgumentException"> If <paramref name="threads"/> is not positive. </exception>
        public virtual GISModel trainModel(int iterations, DataIndexer di, Prior modelPrior, int cutoff, int threads)
        {
            if (threads <= 0)
            {
                throw new System.ArgumentException("threads must be at least one or greater but is " + threads + "!");
            }

            // One expectation buffer per worker thread; each is sized per-predicate below.
            modelExpects = new MutableContext[threads][];

            // ************ Incorporate all of the needed info *****************
            display("Incorporating indexed data for training...  \n");
            contexts           = di.Contexts;
            values             = di.Values;
            this.cutoff        = cutoff;
            predicateCounts    = di.PredCounts;
            numTimesEventsSeen = di.NumTimesEventsSeen;
            numUniqueEvents    = contexts.Length;
            this.prior         = modelPrior;
            //printTable(contexts);

            // The GIS correction constant is the maximum total feature "mass" of any
            // event: the feature count when events are unvalued, or the sum of the
            // feature values when they are valued.
            double correctionConstant = 0;

            for (int ci = 0; ci < contexts.Length; ci++)
            {
                if (values == null || values[ci] == null)
                {
                    if (contexts[ci].Length > correctionConstant)
                    {
                        correctionConstant = contexts[ci].Length;
                    }
                }
                else
                {
                    float cl = values[ci][0];
                    for (int vi = 1; vi < values[ci].Length; vi++)
                    {
                        cl += values[ci][vi];
                    }

                    if (cl > correctionConstant)
                    {
                        correctionConstant = cl;
                    }
                }
            }
            display("done.\n");

            outcomeLabels = di.OutcomeLabels;
            outcomeList   = di.OutcomeList;
            numOutcomes   = outcomeLabels.Length;

            predLabels = di.PredLabels;
            // The prior must know the label spaces before it is queried during training.
            prior.setLabels(outcomeLabels, predLabels);
            numPreds = predLabels.Length;

            display("\tNumber of Event Tokens: " + numUniqueEvents + "\n");
            display("\t    Number of Outcomes: " + numOutcomes + "\n");
            display("\t  Number of Predicates: " + numPreds + "\n");

            // Accumulate the observed (predicate, outcome) counts, weighted by how
            // often each event was seen and, when present, by the feature values.
            float[][] predCount = RectangularArrays.ReturnRectangularFloatArray(numPreds, numOutcomes);
            for (int ti = 0; ti < numUniqueEvents; ti++)
            {
                for (int j = 0; j < contexts[ti].Length; j++)
                {
                    if (values != null && values[ti] != null)
                    {
                        predCount[contexts[ti][j]][outcomeList[ti]] += numTimesEventsSeen[ti] * values[ti][j];
                    }
                    else
                    {
                        predCount[contexts[ti][j]][outcomeList[ti]] += numTimesEventsSeen[ti];
                    }
                }
            }

            //printTable(predCount);
            di = null; // don't need it anymore

            // A fake "observation" to cover features which are not detected in
            // the data.  The default is to assume that we observed "1/10th" of a
            // feature during training.
            double smoothingObservation = _smoothingObservation;

            // Get the observed expectations of the features. Strictly speaking,
            // we should divide the counts by the number of Tokens, but because of
            // the way the model's expectations are approximated in the
            // implementation, this is cancelled out when we compute the next
            // iteration of a parameter, making the extra divisions wasteful.
            parameters = new MutableContext[numPreds];
            for (int i = 0; i < modelExpects.Length; i++)
            {
                modelExpects[i] = new MutableContext[numPreds];
            }
            observedExpects = new MutableContext[numPreds];

            // The model does need the correction constant and the correction feature. The correction constant
            // is only needed during training, and the correction feature is not necessary.
            // For compatibility reasons the model contains form now on a correction constant of 1,
            // and a correction param 0.
            evalParams = new EvalParameters(parameters, 0, 1, numOutcomes);
            int[] activeOutcomes = new int[numOutcomes];
            int[] outcomePattern;
            // The identity pattern is shared by every predicate whose outcomes are all active.
            int[] allOutcomesPattern = new int[numOutcomes];
            for (int oi = 0; oi < numOutcomes; oi++)
            {
                allOutcomesPattern[oi] = oi;
            }
            int numActiveOutcomes = 0;

            // For each predicate, determine its active outcomes and allocate the
            // parameter / expectation slots for exactly those outcomes.
            for (int pi = 0; pi < numPreds; pi++)
            {
                numActiveOutcomes = 0;
                if (useSimpleSmoothing)
                {
                    // With smoothing, every outcome is active for every predicate.
                    numActiveOutcomes = numOutcomes;
                    outcomePattern    = allOutcomesPattern;
                }
                else //determine active outcomes
                {
                    // An outcome is active when it co-occurred with the predicate and
                    // the predicate itself meets the frequency cutoff.
                    for (int oi = 0; oi < numOutcomes; oi++)
                    {
                        if (predCount[pi][oi] > 0 && predicateCounts[pi] >= cutoff)
                        {
                            activeOutcomes[numActiveOutcomes] = oi;
                            numActiveOutcomes++;
                        }
                    }
                    if (numActiveOutcomes == numOutcomes)
                    {
                        outcomePattern = allOutcomesPattern;
                    }
                    else
                    {
                        outcomePattern = new int[numActiveOutcomes];
                        for (int aoi = 0; aoi < numActiveOutcomes; aoi++)
                        {
                            outcomePattern[aoi] = activeOutcomes[aoi];
                        }
                    }
                }
                parameters[pi] = new MutableContext(outcomePattern, new double[numActiveOutcomes]);
                for (int i = 0; i < modelExpects.Length; i++)
                {
                    modelExpects[i][pi] = new MutableContext(outcomePattern, new double[numActiveOutcomes]);
                }
                observedExpects[pi] = new MutableContext(outcomePattern, new double[numActiveOutcomes]);
                // Zero the parameters and per-thread expectations, then seed the
                // observed expectations from the counts (or the smoothing value).
                for (int aoi = 0; aoi < numActiveOutcomes; aoi++)
                {
                    int oi = outcomePattern[aoi];
                    parameters[pi].setParameter(aoi, 0.0);
                    foreach (MutableContext[] modelExpect in modelExpects)
                    {
                        modelExpect[pi].setParameter(aoi, 0.0);
                    }
                    if (predCount[pi][oi] > 0)
                    {
                        observedExpects[pi].setParameter(aoi, predCount[pi][oi]);
                    }
                    else if (useSimpleSmoothing)
                    {
                        observedExpects[pi].setParameter(aoi, smoothingObservation);
                    }
                }
            }

            predCount = null; // don't need it anymore

            display("...done.\n");

            // *************** Find the parameters ***********************
            if (threads == 1)
            {
                display("Computing model parameters ...\n");
            }
            else
            {
                display("Computing model parameters in " + threads + " threads...\n");
            }

            findParameters(iterations, correctionConstant);

            // ************* Create and return the model *****************
            // To be compatible with old models the correction constant is always 1
            return(new GISModel(parameters, predLabels, outcomeLabels, 1, evalParams.CorrectionParam));
        }