protected IGeneratedAlgorithm InferAll(bool inferOnlySpecifiedVars, IEnumerable<IVariable> vars)
{
    IGeneratedAlgorithm ca = GetCompiledInferenceAlgorithm(inferOnlySpecifiedVars, vars);
    Execute(ca);
    return ca;
}
/// <summary>
/// Infers the output messages for all shared variables in this set,
/// for the given model and batch number.
/// </summary>
/// <param name="ca">Compiled algorithm</param>
/// <param name="modelNumber">Model id</param>
/// <param name="batchNumber">Batch number</param>
public void InferOutput(IGeneratedAlgorithm ca, Model modelNumber, int batchNumber)
{
    foreach (ISharedVariable v in this)
    {
        v.InferOutput(ca, modelNumber, batchNumber);
    }
}
/// <summary>
/// Handles the engine's message updated event, recording Bernoulli messages.
/// </summary>
/// <param name="algorithm">The algorithm.</param>
/// <param name="messageEvent">The <see cref="MessageUpdatedEventArgs"/> instance containing the event data.</param>
private void EngineMessageUpdated(IGeneratedAlgorithm algorithm, MessageUpdatedEventArgs messageEvent)
{
    if (!this.MessageHistories.ContainsKey(messageEvent.MessageId))
    {
        this.MessageHistories[messageEvent.MessageId] = new List<Bernoulli>();
    }

    // Console.WriteLine(messageEvent);
    if (messageEvent.Message is Bernoulli bernoulli)
    {
        this.MessageHistories[messageEvent.MessageId].Add(bernoulli);
    }
}
/// <summary>
/// Handles the message updated event, recording Gaussian messages.
/// </summary>
/// <param name="algorithm">The algorithm.</param>
/// <param name="messageEvent">The <see cref="MessageUpdatedEventArgs"/> instance containing the event data.</param>
private void MessageUpdated(IGeneratedAlgorithm algorithm, MessageUpdatedEventArgs messageEvent)
{
    if (!this.MessageHistories.ContainsKey(messageEvent.MessageId))
    {
        this.MessageHistories[messageEvent.MessageId] = new List<Gaussian>();
    }

    if (messageEvent.Message is Gaussian item)
    {
        this.MessageHistories[messageEvent.MessageId].Add(item);
    }

    // Console.WriteLine(messageEvent);
}
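// A minimal sketch of wiring a recording handler like the ones above to the
// engine's MessageUpdated event (which is raised while Execute runs; see below).
// The lambda adapts to the event's delegate type; the dictionary key type is
// assumed to be string, matching the use of MessageId above.
var engine = new InferenceEngine();
var messageHistories = new Dictionary<string, List<Gaussian>>();
engine.MessageUpdated += (algorithm, messageEvent) =>
{
    if (!messageHistories.ContainsKey(messageEvent.MessageId))
    {
        messageHistories[messageEvent.MessageId] = new List<Gaussian>();
    }

    if (messageEvent.Message is Gaussian item)
    {
        messageHistories[messageEvent.MessageId].Add(item);
    }
};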
private void Execute(IGeneratedAlgorithm ca)
{
    // If there is a message update listener, try to add in the engine to listen to messages.
    if (this.MessageUpdated != null)
    {
        DebuggingSupport.TryAddRemoveEventListenerDynamic(ca, OnMessageUpdated, add: true);
    }

    // Register the ProgressChanged handler only while doing inference within InferenceEngine.
    // We do not want the handler to run if the user accesses the GeneratedAlgorithms directly.
    ca.ProgressChanged += OnProgressChanged;
    try
    {
        Stopwatch s = null;
        if (ShowTimings)
        {
            s = new Stopwatch();
            s.Start();
            FileStats.Clear();
        }

        if (ResetOnObservedValueChanged)
        {
            ca.Execute(NumberOfIterations);
        }
        else
        {
            ca.Update(NumberOfIterations - ca.NumberOfIterationsDone);
        }

        if (s != null)
        {
            long elapsed = s.ElapsedMilliseconds;
            Console.WriteLine("Inference time was {0}ms (max {1} iterations)", elapsed, NumberOfIterations);
            if (FileStats.ReadCount > 0 || FileStats.WriteCount > 0)
            {
                Console.WriteLine("{0} file reads {1} file writes", FileStats.ReadCount, FileStats.WriteCount);
            }
        }
    }
    finally
    {
        ca.ProgressChanged -= OnProgressChanged;
        if (this.MessageUpdated != null)
        {
            DebuggingSupport.TryAddRemoveEventListenerDynamic(ca, OnMessageUpdated, add: false);
        }
    }
}
/// <summary>
/// Tries to dynamically add or remove an event handler to a generated algorithm instance.
/// </summary>
/// <param name="ca">The generated algorithm instance</param>
/// <param name="d">The event handler to add or remove</param>
/// <param name="add">If true will add, otherwise will remove</param>
/// <returns>True if the event handler was added or removed successfully</returns>
internal static bool TryAddRemoveEventListenerDynamic(IGeneratedAlgorithm ca, EventHandler<MessageUpdatedEventArgs> d, bool add)
{
    var eventInfo = ca.GetType().GetEvent(MessageEventName);
    if (eventInfo == null)
    {
        return false;
    }

    if (add)
    {
        eventInfo.AddEventHandler(ca, d);
    }
    else
    {
        eventInfo.RemoveEventHandler(ca, d);
    }

    return true;
}
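// A self-contained sketch of the reflection pattern the helper above relies
// on: look the event up by name, then attach or detach a handler without
// compile-time knowledge of the declaring type. Publisher and Args are
// hypothetical types standing in for a generated algorithm and
// MessageUpdatedEventArgs.
using System;
using System.Reflection;

public class Args : EventArgs { }

public class Publisher
{
    public event EventHandler<Args> Updated;

    public void Raise() => Updated?.Invoke(this, new Args());
}

public static class ReflectionEventDemo
{
    public static void Main()
    {
        var publisher = new Publisher();
        EventHandler<Args> handler = (sender, e) => Console.WriteLine("event observed");

        EventInfo eventInfo = publisher.GetType().GetEvent("Updated");
        eventInfo.AddEventHandler(publisher, handler);    // attach dynamically
        publisher.Raise();                                // prints "event observed"
        eventInfo.RemoveEventHandler(publisher, handler); // detach again
        publisher.Raise();                                // prints nothing
    }
}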
/// <summary>
/// Compiles the last built model into a CompiledAlgorithm which implements
/// the specified inference algorithm on the model.
/// </summary>
/// <returns>The compiled algorithm.</returns>
private IGeneratedAlgorithm Compile()
{
    mb.SetModelName(ModelNamespace, ModelName);
    if (ShowMsl)
    {
        Console.WriteLine(mb.ModelString());
    }

    if (ShowFactorGraph || SaveFactorGraphToFolder != null)
    {
        if (SaveFactorGraphToFolder != null && Visualizer?.GraphWriter != null)
        {
            Directory.CreateDirectory(SaveFactorGraphToFolder);
            Visualizer.GraphWriter.WriteGraph(mb, Path.Combine(SaveFactorGraphToFolder, ModelName));
        }

        if (ShowFactorGraph && Visualizer?.FactorGraphVisualizer != null)
        {
            Visualizer.FactorGraphVisualizer.VisualizeFactorGraph(mb);
        }
    }

    Stopwatch s = null;
    if (ShowTimings)
    {
        s = new Stopwatch();
        s.Start();
    }

    IGeneratedAlgorithm compiledAlgorithm = Compiler.CompileWithoutParams(mb.modelType, null, mb.Attributes);
    if (ShowTimings)
    {
        s.Stop();
        Console.WriteLine("Compilation time was " + s.ElapsedMilliseconds + "ms.");
    }

    CompiledAlgorithmInfo info = new CompiledAlgorithmInfo(compiledAlgorithm, mb.observedVars);
    compiledAlgorithms.Push(info);
    foreach (IVariable v in mb.variablesToInfer)
    {
        compiledAlgorithmForVariable[v] = info;
    }

    SetObservedValues(info);
    return info.exec;
}
/// <summary>
/// Initializes a new instance of the <see cref="BinaryModel"/> class.
/// </summary>
/// <param name="trainModel">If set to <c>true</c>, builds the model for training.</param>
/// <param name="showFactorGraph">If set to <c>true</c>, shows the factor graph.</param>
/// <param name="debug">If set to <c>true</c>, enables compiler debugging output.</param>
public BinaryModel(bool trainModel, bool showFactorGraph = false, bool debug = false)
{
    evidence = Variable.Bernoulli(0.5).Named("evidence");
    using (Variable.If(evidence))
    {
        numberOfResidents = Variable.New<int>().Named("numberOfResidents").Attrib(new DoNotInfer());
        numberOfFeatures = Variable.New<int>().Named("numberOfFeatures").Attrib(new DoNotInfer());
        var resident = new Range(numberOfResidents).Named("resident");
        var feature = new Range(numberOfFeatures).Named("feature");
        numberOfExamples = Variable.Array<int>(resident).Named("numberOfExamples").Attrib(new DoNotInfer());
        var example = new Range(numberOfExamples[resident]).Named("example").Attrib(new Sequential());
        noisePrecision = Variable.New<double>().Named("noisePrecision").Attrib(new DoNotInfer());

        weightPriorMeans = Variable.New<GaussianArray>().Named("weightPriorMeans").Attrib(new DoNotInfer());
        weightPriorPrecisions = Variable.New<GammaArray>().Named("weightPriorPrecisions").Attrib(new DoNotInfer());

        weightMeans = Variable.Array<double>(feature).Named("weightMeans");
        weightPrecisions = Variable.Array<double>(feature).Named("weightPrecisions");
        weightMeans.SetTo(Variable<double[]>.Random(weightPriorMeans));
        weightPrecisions.SetTo(Variable<double[]>.Random(weightPriorPrecisions));

        weights = Variable.Array(Variable.Array<double>(feature), resident).Named("weights");
        featureValues = Variable.Array(Variable.Array(Variable.Array<double>(feature), example), resident).Named("featureValues").Attrib(new DoNotInfer());

        // if (!useBias)
        // {
        //     thresholdPriors = Variable.New<GaussianArray>().Named("thresholdPrior").Attrib(new DoNotInfer());
        //     thresholds = Variable.Array<double>(resident).Named("threshold");
        //     thresholds.SetTo(Variable<double[]>.Random(thresholdPriors));
        // }

        activities = Variable.Array(Variable.Array<bool>(example), resident).Named("activities");
        // activities[resident][example].AddAttribute(new MarginalPrototype(new Bernoulli()));

        using (Variable.ForEach(resident))
        {
            var products = Variable.Array(Variable.Array<double>(feature), example).Named("products");
            var scores = Variable.Array<double>(example).Named("scores");
            var scoresPlusNoise = Variable.Array<double>(example).Named("scoresPlusNoise");

            weights[resident][feature] = Variable.GaussianFromMeanAndPrecision(weightMeans[feature], weightPrecisions[feature]);

            using (Variable.ForEach(example))
            {
                using (Variable.ForEach(feature))
                {
                    products[example][feature] = weights[resident][feature] * featureValues[resident][example][feature];
                }

                scores[example] = Variable.Sum(products[example]).Named("score");
                scoresPlusNoise[example] = Variable.GaussianFromMeanAndPrecision(scores[example], noisePrecision).Named("scorePlusNoise");

                // if (useBias)
                {
                    activities[resident][example] = scoresPlusNoise[example] > 0;
                }
                // else
                // {
                //     var diff = (scoresPlusNoise[example] - thresholds[resident]).Named("diff");
                //     activities[example][resident] = diff > 0;
                // }
            }
        }
    }

    engine = new InferenceEngine
    {
        Algorithm = new ExpectationPropagation { DefaultNumberOfIterations = trainModel ? 10 : 1 },
        ShowFactorGraph = showFactorGraph,
        ShowProgress = false,
        // BrowserMode = BrowserMode.Never,
        // BrowserMode = debug ? BrowserMode.OnError : BrowserMode.Never,
        ShowWarnings = debug
    };

    if (debug)
    {
        engine.Compiler.GenerateInMemory = false;
        engine.Compiler.WriteSourceFiles = true;
        engine.Compiler.IncludeDebugInformation = true;
        engine.Compiler.CatchExceptions = true;
    }

#if USE_PRECOMPILED_ALGORITHM
    numberOfResidents.ObservedValue = default(int);
    numberOfExamples.ObservedValue = default(int[]);
    numberOfFeatures.ObservedValue = default(int);
    noisePrecision.ObservedValue = default(double);
    featureValues.ObservedValue = default(double[][][]);
    weightPriorMeans.ObservedValue = default(DistributionStructArray<Gaussian, double>); // (DistributionStructArray<Gaussian, double>)Distribution<double>.Array(default(Gaussian[]));
    weightPriorPrecisions.ObservedValue = default(DistributionStructArray<Gamma, double>); // (DistributionStructArray<Gamma, double>)Distribution<double>.Array(default(Gamma[]));
    activities.ObservedValue = default(bool[][]);

    if (trainModel)
    {
        activities.AddAttribute(new DoNotInfer());
        algorithm = engine.GetCompiledInferenceAlgorithm(new IVariable[] { weights, weightMeans, weightPrecisions });
    }
    else
    {
        activities.AddAttribute(QueryTypes.Marginal);
        algorithm = engine.GetCompiledInferenceAlgorithm(new IVariable[] { activities });
    }
#endif
}
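// Hypothetical usage of the constructor above: one instance compiled for
// training (10 EP iterations per run) and one for prediction (a single
// iteration). The training and prediction entry points of BinaryModel are
// not part of this excerpt, so only construction is shown.
var trainingModel = new BinaryModel(trainModel: true, showFactorGraph: false, debug: false);
var predictionModel = new BinaryModel(trainModel: false);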
/// <summary>
/// Infers the marginal distribution for the specified variable.
/// </summary>
/// <typeparam name="TReturn">Desired return type, which may be a distribution type or an array type if the argument is a VariableArray</typeparam>
/// <param name="var">The variable whose marginal is to be inferred</param>
/// <returns>The marginal distribution (or an approximation to it)</returns>
public TReturn Infer<TReturn>(IVariable var)
{
    IGeneratedAlgorithm ca = InferAll(false, var);
    return ca.Marginal<TReturn>(var.Name);
}
/// <summary>
/// Infers the marginal distribution for the specified variable.
/// </summary>
/// <param name="var">The variable whose marginal is to be inferred</param>
/// <returns>The marginal distribution (or an approximation to it)</returns>
public object Infer(IVariable var)
{
    IGeneratedAlgorithm ca = InferAll(false, var);
    return ca.Marginal(var.Name);
}
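// A minimal sketch of the two Infer overloads above, using a toy model of a
// Gaussian mean with one observation, assuming the Infer.NET modelling API is
// in scope (the variable names here are illustrative only).
var engine = new InferenceEngine();
Variable<double> mean = Variable.GaussianFromMeanAndPrecision(0, 1).Named("mean");
Variable<double> x = Variable.GaussianFromMeanAndPrecision(mean, 1).Named("x");
x.ObservedValue = 0.5;

// Typed overload: the marginal comes back as the requested distribution type.
Gaussian posterior = engine.Infer<Gaussian>(mean);

// Untyped overload: returns object, for when the type is not known statically.
object posteriorAsObject = engine.Infer(mean);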
/// <summary>
/// Initializes a new instance of the <see cref="CompiledAlgorithmInfo"/> class.
/// </summary>
/// <param name="exec">The compiled algorithm.</param>
/// <param name="observedVars">The observed variables of the compiled model.</param>
public CompiledAlgorithmInfo(IGeneratedAlgorithm exec, IEnumerable<Variable> observedVars)
{
    this.exec = exec;
    this.observedVarsInOrder.AddRange(observedVars);
    this.observedVars.AddRange(observedVars);
}
/// <summary>
/// Update all the SharedVariables registered with this model.
/// </summary>
/// <param name="engine">The compiled algorithm to run.</param>
/// <param name="batchNumber">A number from 0 to BatchCount-1</param>
public void InferShared(IGeneratedAlgorithm engine, int batchNumber)
{
    SharedVariables.SetInput(this, batchNumber);
    SharedVariables.InferOutput(engine, this, batchNumber);
}
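// A minimal sketch of batched inference driving InferShared, along the lines
// of Infer.NET's shared-variable pattern (here via the InferenceEngine
// overload); the data values and variable names are made up for illustration.
double[][] dataBatches = { new[] { 1.1, 0.9 }, new[] { 1.3, 0.7 } };
int batchCount = dataBatches.Length;

Model model = new Model(batchCount);
SharedVariable<double> mean = SharedVariable<double>.Random(Gaussian.FromMeanAndPrecision(0, 1));
Variable<int> count = Variable.New<int>();
Range item = new Range(count);
VariableArray<double> y = Variable.Array<double>(item);
y[item] = Variable.GaussianFromMeanAndPrecision(mean.GetCopyFor(model), 1.0).ForEach(item);

var engine = new InferenceEngine();
for (int pass = 0; pass < 5; pass++)
{
    for (int batch = 0; batch < batchCount; batch++)
    {
        count.ObservedValue = dataBatches[batch].Length;
        y.ObservedValue = dataBatches[batch];

        // Sets the shared variables' inputs for this batch, runs inference,
        // then infers the shared variables' output messages (as above).
        model.InferShared(engine, batch);
    }
}

Gaussian posterior = mean.Marginal<Gaussian>();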
/// <summary>
/// Infer the shared variable's output message for the given model and batch number.
/// </summary>
/// <param name="ca">The compiled algorithm.</param>
/// <param name="modelNumber">The model id.</param>
/// <param name="batchNumber">The batch number.</param>
public abstract void InferOutput(IGeneratedAlgorithm ca, Model modelNumber, int batchNumber);