public void setUp()
{
    // Build the umbrella-world ForwardBackward instance shared by the tests:
    // transition model, the Xt -> Xt-1 variable map, and the sensor model.
    var transitionModel = GenericTemporalModelFactory.getUmbrellaWorldTransitionModel();
    var xtToXtm1Map = GenericTemporalModelFactory.getUmbrellaWorld_Xt_to_Xtm1_Map();
    var sensorModel = GenericTemporalModelFactory.getUmbrellaWorldSensorModel();
    uw = new ForwardBackward(transitionModel, xtToXtm1Map, sensorModel);
}
public void HealthyFeverTest()
{
    // Classic Healthy/Fever HMM decoding example: given three observed
    // symptom levels, the most likely hidden-state sequence is expected
    // to be Healthy, Healthy, Fever.
    string[] stateNames = { "Healthy", "Fever" };
    int[] symbolAlphabet = { 0, 1, 2 };
    double[,] transitionMatrix = { { 0.7, 0.3 }, { 0.4, 0.6 } };
    double[,] emissionMatrix = { { 0.5, 0.4, 0.1 }, { 0.1, 0.3, 0.6 } };
    double[] initialDistribution = { 0.6, 0.4 };
    int[] observedSequence = { 0, 1, 2 };

    var expected = new[] { "Healthy", "Healthy", "Fever" };

    var decoded = ForwardBackward.GetResult(
        observedSequence.Length, // T: length of the observation sequence
        stateNames.Length,       // N: number of hidden states
        symbolAlphabet.Length,   // M: number of observation symbols
        stateNames,
        symbolAlphabet,
        transitionMatrix,
        emissionMatrix,
        initialDistribution,
        observedSequence);

    CollectionAssert.AreEqual(expected, decoded);
}
public void TestForwardNormalizedRun2()
{
    // Forward pass on a two-state model (s, t) over the binary alphabet {A=0, B=1},
    // with normalization enabled (log-scaled result).
    // FIX: removed the unused local `states` list (dead State allocations) and
    // the stale commented-out constructor call.
    var startDistribution = new[] { 0.85, 0.15 };
    var tpm = new double[2][];
    tpm[0] = new[] { 0.3, 0.7 };
    tpm[1] = new[] { 0.1, 0.9 };
    var observations = new List<IObservation>
    {
        new Observation(new double[] { 0 }, "A"),
        new Observation(new double[] { 1 }, "B"),
        new Observation(new double[] { 1 }, "B"),
        new Observation(new double[] { 0 }, "A")
    };
    var emissions = new DiscreteDistribution[2];
    emissions[0] = new DiscreteDistribution(new double[] { 0, 1 }, new[] { 0.4, 0.6 });
    emissions[1] = new DiscreteDistribution(new double[] { 0, 1 }, new[] { 0.5, 0.5 });

    var algo = new ForwardBackward(true); // true => normalized (log) computation
    var res = algo.RunForward(
        observations,
        HiddenMarkovModelStateFactory.GetState(new ModelCreationParameters<DiscreteDistribution>
        {
            Pi = startDistribution,
            TransitionProbabilityMatrix = tpm,
            Emissions = emissions
        }));

    // Log-likelihood of the full observation sequence.
    Assert.AreEqual(-2.9037969640415056, res);
}
public void TestForwardRun1()
{
    // Forward pass on a two-state model (H, L) over a four-symbol alphabet
    // {A=0, C=1, G=2, T=3}, without normalization (plain probability result).
    // FIX: removed the unused local `states` list (dead State allocations) and
    // the stale commented-out constructor call.
    var startDistribution = new[] { 0.5, 0.5 };
    var tpm = new double[2][];
    tpm[0] = new[] { 0.5, 0.5 };
    tpm[1] = new[] { 0.4, 0.6 };
    var observations = new List<IObservation>
    {
        new Observation(new double[] { 2 }, "G"),
        new Observation(new double[] { 2 }, "G"),
        new Observation(new double[] { 1 }, "C"),
        new Observation(new double[] { 0 }, "A")
    };
    var emissions = new DiscreteDistribution[2];
    emissions[0] = new DiscreteDistribution(new double[] { 0, 1, 2, 3 }, new[] { 0.2, 0.3, 0.3, 0.2 });
    emissions[1] = new DiscreteDistribution(new double[] { 0, 1, 2, 3 }, new[] { 0.3, 0.2, 0.2, 0.3 });

    var algo = new ForwardBackward(false); // false => raw probabilities
    var res = algo.RunForward(
        observations,
        HiddenMarkovModelStateFactory.GetState(new ModelCreationParameters<DiscreteDistribution>
        {
            Pi = startDistribution,
            TransitionProbabilityMatrix = tpm,
            Emissions = emissions
        }));

    // Probability of the full observation sequence.
    Assert.AreEqual(0.0038431500000000005, res);
}
private double[] PredictNextValue<TDistribution>(IHiddenMarkovModel<TDistribution> model, IPredictionRequest request, double[][] trainingSet)
    where TDistribution : IDistribution
{
    // Predicts the next observation vector: find the historical day whose forward
    // likelihood magnitude is closest to yesterday's (within request.Tolerance),
    // then shift yesterday's values by that day's day-over-day delta.
    var N = trainingSet.Length;
    var K = trainingSet[0].Length;
    var result = new double[K]; // stays all-zero when no usable match exists

    var yesterday = trainingSet[N - 1];
    var forwardBackward = new ForwardBackward(model.Normalized);
    var yesterdayLikelihood = forwardBackward.RunForward(Helper.Convert(new[] { yesterday }), model);
    Debug.WriteLine("Yesterday Likelihood : " + new Vector(yesterday) + " : " + yesterdayLikelihood + " ");

    var guessess = FindMostSimilarObservations(model, trainingSet, yesterdayLikelihood, request.Tolerance);
    var bestGuessPlace = FindBestGuess(request, guessess);

    // BUGFIX: the original indexed trainingSet[PlaceInSequence + 1] BEFORE the
    // bounds guard, and the guard compared against trainingSet.Length (a value
    // the index could never legally hold) instead of Length - 1, so a match on
    // the last day would throw. Guard first, with the correct bound; the check
    // is loop-invariant, so it is also hoisted out of the per-feature loop.
    if (bestGuessPlace.PlaceInSequence + 1 < trainingSet.Length)
    {
        var tomorrow = trainingSet[bestGuessPlace.PlaceInSequence + 1];
        var mostSimilar = trainingSet[bestGuessPlace.PlaceInSequence];
        for (var k = 0; k < K; k++)
        {
            // Yesterday's value plus the historical next-day change.
            result[k] = yesterday[k] + (tomorrow[k] - mostSimilar[k]);
        }
    }

    Debug.WriteLine("Predicted (for day " + N + ") : " + new Vector(result) + " : " + forwardBackward.RunForward(Helper.Convert(result), model));
    return result;
}
public List<CategoricalDistribution> DoForwardBackward(object owner, IContextLookup globalVars)
{
    // Resolve the random variables for both models; note the sensor model is
    // given (and extends) the transition model's variable set.
    var variables = TemporalModel.TransitionModel.GetRandomVariables(owner, globalVars);
    var transitionBayesModel = new FiniteBayesModel(TemporalModel.TransitionModel.GetNetwork(variables));
    variables = TemporalModel.SensorModel.GetRandomVariables(owner, globalVars, variables);
    var sensorBayesModel = new FiniteBayesModel(TemporalModel.SensorModel.GetNetwork(variables));

    var reverseTemporalMap = TemporalModel.GetReverseTemporalMap(variables);
    var algorithm = new ForwardBackward(transitionBayesModel, reverseTemporalMap, sensorBayesModel);

    // Translate the per-step evidence into the java collections the underlying
    // (java-interop) implementation expects: one ArrayList per time step.
    var evidenceSteps = new java.util.ArrayList(Evidences.Count);
    foreach (List<PropositionInfo> step in Evidences)
    {
        var stepPropositions = new java.util.ArrayList(step.Count);
        foreach (PropositionInfo propositionInfo in step)
        {
            stepPropositions.add(propositionInfo.GetProposition(owner, globalVars, variables));
        }
        evidenceSteps.add(stepPropositions);
    }

    CategoricalDistribution priorTable = Prior.GetProbabilityTable(variables);

    // Run smoothing and convert the java result list back to a typed .NET list.
    return algorithm.forwardBackward(evidenceSteps, priorTable)
                    .toArray()
                    .Select(o => (CategoricalDistribution)o)
                    .ToList();
}
public double HeuristicFunction<TDistribution>(double[] node, IHiddenMarkovModel<TDistribution> model)
    where TDistribution : IDistribution
{
    // Heuristic value = forward likelihood of the single candidate observation
    // under the given model (normalized per the model's own setting).
    var forwardBackward = new ForwardBackward(model.Normalized);
    return forwardBackward.RunForward(Helper.Convert(new[] { node }), model);
}
static void forwardBackWardDemo()
{
    // Demonstrates Forward-Backward smoothing on the umbrella world:
    // Rain_t is hidden, Umbrella_t is observed, evidence is added one day
    // at a time and the smoothed estimates are reprinted after each day.
    // FIX: corrected the "Umbrealla" misspelling in the console messages
    // (the project identifier ExampleRV.UMBREALLA_t_RV is left as declared).
    System.Console.WriteLine("DEMO: Forward-BackWard");
    System.Console.WriteLine("======================");
    System.Console.WriteLine("Umbrella World");
    System.Console.WriteLine("--------------");

    ForwardBackward uw = new ForwardBackward(
        GenericTemporalModelFactory.getUmbrellaWorldTransitionModel(),
        GenericTemporalModelFactory.getUmbrellaWorld_Xt_to_Xtm1_Map(),
        GenericTemporalModelFactory.getUmbrellaWorldSensorModel());

    // Uniform prior over Rain_0.
    ICategoricalDistribution prior = new ProbabilityTable(new double[] { 0.5, 0.5 }, ExampleRV.RAIN_t_RV);

    // Day 1: umbrella observed.
    ICollection<ICollection<AssignmentProposition>> evidence = CollectionFactory.CreateQueue<ICollection<AssignmentProposition>>();
    ICollection<AssignmentProposition> e1 = CollectionFactory.CreateQueue<AssignmentProposition>();
    e1.Add(new AssignmentProposition(ExampleRV.UMBREALLA_t_RV, true));
    evidence.Add(e1);
    ICollection<ICategoricalDistribution> smoothed = uw.forwardBackward(evidence, prior);
    System.Console.WriteLine("Day 1 (Umbrella_t=true) smoothed:\nday 1 = " + smoothed.Get(0));

    // Day 2: umbrella observed again.
    ICollection<AssignmentProposition> e2 = CollectionFactory.CreateQueue<AssignmentProposition>();
    e2.Add(new AssignmentProposition(ExampleRV.UMBREALLA_t_RV, true));
    evidence.Add(e2);
    smoothed = uw.forwardBackward(evidence, prior);
    System.Console.WriteLine("Day 2 (Umbrella_t=true) smoothed:\nday 1 = " + smoothed.Get(0) + "\nday 2 = " + smoothed.Get(1));

    // Day 3: no umbrella observed.
    ICollection<AssignmentProposition> e3 = CollectionFactory.CreateQueue<AssignmentProposition>();
    e3.Add(new AssignmentProposition(ExampleRV.UMBREALLA_t_RV, false));
    evidence.Add(e3);
    smoothed = uw.forwardBackward(evidence, prior);
    System.Console.WriteLine("Day 3 (Umbrella_t=false) smoothed:\nday 1 = " + smoothed.Get(0) + "\nday 2 = " + smoothed.Get(1) + "\nday 3 = " + smoothed.Get(2));

    System.Console.WriteLine("======================");
}
public void TestBackwardNormalizedRun2()
{
    // Backward pass on a two-state model (s, t) over the binary alphabet {A=0, B=1},
    // with normalization enabled (log-scaled result and Beta table).
    // FIX: removed the unused local `states` list (dead State allocations) and
    // the stale commented-out constructor call.
    var startDistribution = new[] { 0.85, 0.15 };
    var tpm = new double[2][];
    tpm[0] = new[] { 0.3, 0.7 };
    tpm[1] = new[] { 0.1, 0.9 };
    var observations = new List<IObservation>
    {
        new Observation(new double[] { 0 }, "A"),
        new Observation(new double[] { 1 }, "B"),
        new Observation(new double[] { 1 }, "B"),
        new Observation(new double[] { 0 }, "A")
    };
    var emissions = new DiscreteDistribution[2];
    emissions[0] = new DiscreteDistribution(new double[] { 0, 1 }, new[] { 0.4, 0.6 });
    emissions[1] = new DiscreteDistribution(new double[] { 0, 1 }, new[] { 0.5, 0.5 });

    var algo = new ForwardBackward(true); // true => normalized (log) computation
    var res = algo.RunBackward(
        observations,
        HiddenMarkovModelStateFactory.GetState(new ModelCreationParameters<DiscreteDistribution>
        {
            Pi = startDistribution,
            TransitionProbabilityMatrix = tpm,
            Emissions = emissions
        }));

    Assert.AreEqual(-1.3665309502789404, res);

    // Full log-Beta table, last time step first (log 1 = 0 at t = T-1).
    Assert.AreEqual(0d, algo.Beta[3][0]);
    Assert.AreEqual(0d, algo.Beta[3][1]);
    Assert.AreEqual(-0.75502258427803293, algo.Beta[2][0]);
    Assert.AreEqual(-0.71334988787746456, algo.Beta[2][1]);
    Assert.AreEqual(-1.3621872857766575, algo.Beta[1][0]);
    Assert.AreEqual(-1.3915079281727778, algo.Beta[1][1]);
    Assert.AreEqual(-2.0163315403910613, algo.Beta[0][0]);
    Assert.AreEqual(-2.0613580382895655, algo.Beta[0][1]);
}
public void TestBackwardRun2()
{
    // Backward pass on the same two-state model (s, t) without normalization
    // (raw probabilities in the result and the Beta table).
    // FIX: removed the unused local `states` list (dead State allocations) and
    // the stale commented-out constructor call.
    var startDistribution = new[] { 0.85, 0.15 };
    var tpm = new double[2][];
    tpm[0] = new[] { 0.3, 0.7 };
    tpm[1] = new[] { 0.1, 0.9 };
    var observations = new List<IObservation>
    {
        new Observation(new double[] { 0 }, "A"),
        new Observation(new double[] { 1 }, "B"),
        new Observation(new double[] { 1 }, "B"),
        new Observation(new double[] { 0 }, "A")
    };
    var emissions = new DiscreteDistribution[2];
    emissions[0] = new DiscreteDistribution(new double[] { 0, 1 }, new[] { 0.4, 0.6 });
    emissions[1] = new DiscreteDistribution(new double[] { 0, 1 }, new[] { 0.5, 0.5 });

    var algo = new ForwardBackward(false); // false => raw probabilities
    var res = algo.RunBackward(
        observations,
        HiddenMarkovModelStateFactory.GetState(new ModelCreationParameters<DiscreteDistribution>
        {
            Pi = startDistribution,
            TransitionProbabilityMatrix = tpm,
            Emissions = emissions
        }));

    Assert.AreEqual(0.25499, res);

    // Full Beta table, last time step first (Beta = 1 at t = T-1).
    Assert.AreEqual(1, algo.Beta[3][0]);
    Assert.AreEqual(1, algo.Beta[3][1]);
    Assert.AreEqual(0.47, algo.Beta[2][0]);
    Assert.AreEqual(0.49, algo.Beta[2][1]);
    Assert.AreEqual(0.2561, algo.Beta[1][0]);
    Assert.AreEqual(0.2487, algo.Beta[1][1]);
    Assert.AreEqual(0.133143, algo.Beta[0][0]);
    Assert.AreEqual(0.127281, algo.Beta[0][1]);
}
// Constructor: wires this monitor to the market-data records for a calendar
// futures spread (near future, spread, far future) and caches the root
// strategy's futures account.
// NOTE(review): the `fb` (ForwardBackward) parameter is never used in this
// constructor — confirm whether it is stored elsewhere or can be dropped.
public MonitorEnter_CalFut( StrategyNode node, Detail.ProductType pt, String fut1, String spread, String fut2, ForwardBackward fb)
{
    this._node = node;

    // Select the market-data board matching the product type.
    IMarketDataBoard board = null;
    switch (pt)
    {
        case Detail.ProductType.KospiFuture:
            board = RmdManager.Ins().KospiFuture;
            break;
        case Detail.ProductType.UsdFuture:
            board = RmdManager.Ins().Future;
            break;
        case Detail.ProductType.KtbFuture:
            // Both USD and KTB futures use the same board here.
            board = RmdManager.Ins().Future;
            break;
        default:
            String errorMsg = String.Format("{0} is not valid type", pt);
            logger.Error(errorMsg);
            // NOTE(review): if KillWithNotice does not terminate the process,
            // `board` stays null and board.GetData below throws NRE — confirm
            // KillWithNotice's semantics.
            Util.KillWithNotice(errorMsg);
            break;
    }

    // Resolve and register the three reference market-data records.
    _refRmdFut1 = board.GetData(fut1);
    _refRmdSpread = board.GetData(spread);
    _refRmdFut2 = board.GetData(fut2);
    _referenceRmds.Add(_refRmdFut1);
    _referenceRmds.Add(_refRmdSpread);
    _referenceRmds.Add(_refRmdFut2);

    // NOTE(review): `as` cast is unchecked — assumes node.Root is always an
    // STR_Arb; a different root type would NRE on the next line. Verify.
    STR_Arb root = node.Root as STR_Arb;
    _futureAccount = root.FutureAccount;
}
private IList<ObservationWithLikelihood<double[]>> FindMostSimilarObservations<TDistribution>(IHiddenMarkovModel<TDistribution> model, double[][] trainingSet, double yesterdayLikelihood, double tolerance)
    where TDistribution : IDistribution
{
    // Scans the training history (from index N-2 down to 1; index 0 is excluded)
    // for days whose forward-likelihood magnitude lies strictly within
    // +/- tolerance of yesterday's likelihood magnitude.
    var matches = new List<ObservationWithLikelihood<double[]>>();
    var forwardBackward = new ForwardBackward(model.Normalized);
    var targetMagnitude = Math.Abs(yesterdayLikelihood);

    for (var n = trainingSet.Length - 2; n > 0; n--)
    {
        var candidate = Helper.Convert(new[] { trainingSet[n] });
        var likelihood = forwardBackward.RunForward(candidate, model);

        // Strict window: targetMagnitude - tolerance < |likelihood| < targetMagnitude + tolerance.
        var magnitude = Math.Abs(likelihood);
        if (magnitude > targetMagnitude - tolerance && magnitude < targetMagnitude + tolerance)
        {
            matches.Add(new ObservationWithLikelihood<double[]>
            {
                LogLikelihood = likelihood,
                Observation = trainingSet[n],
                // Recorded as n - 1 so callers can index "the day before" directly.
                PlaceInSequence = n - 1
            });
        }
    }

    return matches;
}
// Runs Baum-Welch (EM) training for a mixture-emission HMM until the model
// converges, the likelihood delta drops below likelihoodTolerance, or
// maxIterations is exhausted. Returns the last estimated model.
// NOTE(review): each iteration calls _gammaEstimator.Estimate(@params) (and the
// mixture estimators) multiple times — presumably the estimators cache their
// result; confirm, otherwise this recomputes the same quantities repeatedly.
public IHiddenMarkovModel <Mixture <IMultivariateDistribution> > Run(int maxIterations, double likelihoodTolerance)
{
    // Forward-Backward engine shared across all iterations.
    var forwardBackward = new ForwardBackward(Normalized);
    do
    {
        maxIterations--;
        // After the first iteration (non-zero likelihood), promote the previous
        // estimate to be the current model for this E-step.
        if (!_estimatedModel.Likelihood.EqualsTo(0))
        {
            _currentModel = HiddenMarkovModelStateFactory.GetState(new ModelCreationParameters <Mixture <IMultivariateDistribution> > { Pi = _estimatedPi, TransitionProbabilityMatrix = _estimatedTransitionProbabilityMatrix, Emissions = _estimatedEmissions });
            _currentModel.Normalized = Normalized;
            _currentModel.Likelihood = _estimatedModel.Likelihood;
        }
        // E-step: run the Forward-Backward procedure to fill Alpha and Beta.
        forwardBackward.RunForward(_observations, _currentModel);
        forwardBackward.RunBackward(_observations, _currentModel);
        // Package everything the estimators need (Alpha/Beta, observations,
        // model, mixture component count L, and per-observation weights).
        var @params = new MixtureSigmaEstimationParameters <Mixture <IMultivariateDistribution> > { Alpha = forwardBackward.Alpha, Beta = forwardBackward.Beta, Observations = _observations, Model = _currentModel, Normalized = _currentModel.Normalized, L = _currentModel.Emission[0].Components.Length, ObservationWeights = _observationWeights };
        _gammaEstimator = new GammaEstimator <Mixture <IMultivariateDistribution> >();
        _ksiEstimator = new KsiEstimator <Mixture <IMultivariateDistribution> >();
        var mixtureCoefficientsEstimator = new MixtureCoefficientsEstimator <Mixture <IMultivariateDistribution> >();
        var mixtureMuEstimator = new MixtureMuEstimator <Mixture <IMultivariateDistribution> >(); // Mean
        var mixtureSigmaEstimator = new MixtureSigmaEstimator <Mixture <IMultivariateDistribution> >(); // Covariance
        var mixtureGammaEstimator = new MixtureGammaEstimator <Mixture <IMultivariateDistribution> >();
        // Calculate Gamma (state posteriors) and per-component Gamma.
        @params.Gamma = _gammaEstimator.Estimate(@params);
        @params.GammaComponents = mixtureGammaEstimator.Estimate(@params);
        // M-step: re-estimate initial distribution and transitions.
        EstimatePi(_gammaEstimator.Estimate(@params));
        // TODO : weights for A
        EstimateTransitionProbabilityMatrix(_gammaEstimator.Estimate(@params), _ksiEstimator.Estimate(@params), _observationWeights, _observations.Count);
        // M-step: re-estimate each state's mixture emission distribution.
        for (var n = 0; n < _currentModel.N; n++)
        {
            var mixturesComponents = _currentModel.Emission[n].Coefficients.Length;
            var distributions = new IMultivariateDistribution[mixturesComponents];
            // Calculate coefficients for state n
            // TODO : weights for W
            var coefficients = mixtureCoefficientsEstimator.Estimate(@params)[n];
            if (Normalized)
            {
                mixtureCoefficientsEstimator.Denormalize();
            }
            // TODO : weights Mu
            @params.Mu = mixtureMuEstimator.Estimate(@params);
            for (var l = 0; l < mixturesComponents; l++)
            {
                // TODO : weights Sigma
                // Component l of state n: normal distribution from the
                // re-estimated mean and covariance.
                distributions[l] = new NormalDistribution(mixtureMuEstimator.Estimate(@params)[n, l], mixtureSigmaEstimator.Estimate(@params)[n, l]);
            }
            _estimatedEmissions[n] = new Mixture <IMultivariateDistribution>(coefficients, distributions);
        }
        // Assemble the newly estimated model and score it.
        _estimatedModel = HiddenMarkovModelStateFactory.GetState(new ModelCreationParameters <Mixture <IMultivariateDistribution> > { Pi = _estimatedPi, TransitionProbabilityMatrix = _estimatedTransitionProbabilityMatrix, Emissions = _estimatedEmissions });
        _estimatedModel.Normalized = Normalized;
        _estimatedModel.Likelihood = forwardBackward.RunForward(_observations, _estimatedModel);
        // Convergence measure: absolute change in likelihood magnitude.
        _likelihoodDelta = Math.Abs(Math.Abs(_currentModel.Likelihood) - Math.Abs(_estimatedModel.Likelihood));
        Debug.WriteLine("Iteration {3} , Current {0}, Estimate {1} Likelihood delta {2}", _currentModel.Likelihood, _estimatedModel.Likelihood, _likelihoodDelta, maxIterations);
    }while (_currentModel != _estimatedModel && maxIterations > 0 && _likelihoodDelta > likelihoodTolerance);
    return(_estimatedModel);
}