/// <summary>
/// Creates a new object that is a copy of the current instance.
/// </summary>
/// <returns>
/// A new object that is a copy of this instance.
/// </returns>
public override object Clone()
{
    // Deep-copy the parameter arrays so the clone shares no state with this model.
    double[,] A = (double[, ])LogTransitions.Clone();
    // NOTE(review): A and pi are copied from log-domain members (LogTransitions,
    // LogInitial), but B is copied from Emissions, and the constructor is invoked
    // with logarithm: true. This looks like it mixes linear and log probabilities —
    // confirm whether B should come from a log-domain emission matrix instead.
    double[,] B = (double[, ])Emissions.Clone();
    double[] pi = (double[])LogInitial.Clone();
    return(new HiddenMarkovModel(A, B, pi, logarithm: true));
}
/// <summary>
/// Backward algorithm (without log-space). Beta values are divided by the same
/// per-step scale factors c(t) that the forward pass stored in tempInstancts.
/// </summary>
/// <param name="observations">A sequence of observations.</param>
/// <returns>The probability of the observation sequence, rescaled back to the unscaled domain.</returns>
private double backward(int[] observations)
{
    if (observations == null)
    {
        throw new ArgumentNullException("observations");
    }
    // NOTE(review): tempInstancts is a field, not a caller argument — an
    // InvalidOperationException would describe this failure more accurately.
    if (tempInstancts == null)
    {
        throw new ArgumentNullException("tempInstancts");
    }

    int T = observations.Length;

    // For beta variables, I use the same scale factors
    // for each time t as I used for alpha variables.

    // 1. Initialization: beta_{T-1}(i) = 1, divided by the final scale factor.
    for (int i = 0; i < States; i++)
    {
        tempInstancts[T - 1].Beta[i] = 1.0 / tempInstancts[T - 1].c;
    }

    // 2. Induction, walking backwards in time.
    for (int t = T - 2; t >= 0; t--)
    {
        for (int i = 0; i < States; i++)
        {
            double sum = 0.0;
            for (int j = 0; j < States; j++)
            {
                // sum over j of a(i,j) * b_j(o_{t+1}) * beta_{t+1}(j)
                sum += (Transitions.getValue(i, j) * Emissions.getValue(j, observations[t + 1]) * tempInstancts[t + 1].Beta[j]);
            }
            // NOTE(review): "+=" is only correct if Beta is zero-initialized for
            // this run (presumably by InitializeObservation); plain "=" would be
            // the conventional recurrence — verify Beta is reset between calls.
            tempInstancts[t].Beta[i] += sum / tempInstancts[t].c;
        }
    }

    // 3. Termination: P(O|lambda) computed in the scaled domain...
    double POGivenLambdaScaled = 0.0;
    for (int i = 0; i < Probabilities.Length; i++)
    {
        POGivenLambdaScaled += Probabilities[i] * Emissions.getValue(i, observations[0]) * tempInstancts[0].Beta[i];
    }

    // ...then multiplied back by the product of all scale factors.
    double scaling = 1.0;
    for (int t = 0; t < T; t++)
    {
        scaling *= tempInstancts[t].c;
    }
    var POGivenLambda = POGivenLambdaScaled * scaling;
    return(POGivenLambda);
}
/// <summary>
/// Queries the TraaS web service for the current emission values of one vehicle.
/// </summary>
/// <param name="address">Service endpoint address; the local default is used when null or empty.</param>
/// <param name="vehID">Identifier of the vehicle in the running simulation.</param>
/// <returns>The vehicle's current emission readings.</returns>
/// <exception cref="FaultException">
/// Thrown when the vehicle is not in the simulation or the service call fails.
/// </exception>
public Emissions OBU1_GetVehicleEmissions(string address, string vehID)
{
    if (string.IsNullOrEmpty(address))
    {
        address = "http://127.0.0.1:8080/TRAAS_WS"; //default address
    }

    obu1service.TraasReference.ServiceImplClient client = new obu1service.TraasReference.ServiceImplClient();
    client.Endpoint.Address = new EndpointAddress(address);
    try
    {
        client.Open();
        if (isVehicleActive(address, vehID))
        {
            Emissions emissions = new Emissions();
            emissions.CO = client.Vehicle_getCOEmission(vehID);
            emissions.CO2 = client.Vehicle_getCO2Emission(vehID);
            emissions.HC = client.Vehicle_getHCEmission(vehID);
            emissions.Noise = client.Vehicle_getNoiseEmission(vehID);
            emissions.NOx = client.Vehicle_getNOxEmission(vehID);
            emissions.PMx = client.Vehicle_getPMxEmission(vehID);
            return(emissions);
        }
        throw new FaultException("Vehicle with that ID is not in simulation.");
    }
    catch (FaultException e)
    {
        client.Abort();
        throw new FaultException(e.Message);
    }
    catch (Exception e)
    {
        client.Abort();
        // BUGFIX: InnerException may be null; fall back to the exception itself so
        // a NullReferenceException does not mask the original error.
        throw new FaultException(e.InnerException?.ToString() ?? e.ToString());
    }
    finally
    {
        // BUGFIX: Close() throws on a faulted/aborted channel (e.g. after the
        // Abort() calls above), which would replace the exception being thrown.
        // Use the standard WCF close-or-abort pattern instead.
        try
        {
            client.Close();
        }
        catch
        {
            client.Abort();
        }
    }
}
/// <summary>
/// Computes the xi values for time step t: the scaled joint probability of being
/// in each state i at time t and each state j at time t + 1, stored sparsely.
/// Because scaled alpha/beta values are used, dividing by the probability of the
/// whole string is replaced by normalizing over the accumulated sum.
/// </summary>
/// <param name="t">Current time.</param>
/// <param name="nextObservation">Observation at time t + 1.</param>
/// <returns>Probability of transition from each state to any other at time t.</returns>
private MySparse2DMatrix calcXi(int t, int nextObservation)
{
    if (tempInstancts == null)
    {
        throw new ArgumentNullException("tempInstancts");
    }
    if (tempInstancts[t].Alpha == null || tempInstancts[t].Beta == null)
    {
        throw new ArgumentNullException("Alpha and Beta aren't set");
    }

    var xi = new MySparse2DMatrix();
    double normalizer = 0;

    // Accumulate alpha_t(i) * a(i,j) * b_j(o_{t+1}) * beta_{t+1}(j),
    // storing only non-zero contributions in the sparse matrix.
    for (int from = 0; from < States; from++)
    {
        for (int to = 0; to < States; to++)
        {
            double contribution = tempInstancts[t].Alpha[from]
                                  * Transitions.getValue(from, to)
                                  * Emissions.getValue(to, nextObservation)
                                  * tempInstancts[t + 1].Beta[to];
            normalizer += contribution;
            if (contribution != 0)
            {
                xi.setValue(from, to, contribution);
            }
        }
    }

    // Normalize every stored entry so the matrix sums to one.
    if (normalizer != 0)
    {
        for (int from = 0; from < States; from++)
        {
            for (int to = 0; to < States; to++)
            {
                double stored = xi.getValue(from, to);
                if (stored != 0)
                {
                    xi.setValue(from, to, stored / normalizer);
                }
            }
        }
    }

    return xi;
}
/// <summary>
/// Runs the Baum-Welch learning algorithm for hidden Markov models.
/// </summary>
/// <remarks>
/// Learning problem. Given a training observation sequence O = {o1, o2, ..., oK}
/// and general structure of HMM (numbers of hidden and visible states), determine
/// HMM parameters M = (A, B, pi) that best fit training data.
///
/// The Baum-Welch algorithm is a particular case of a generalized
/// expectation-maximization (GEM) algorithm. The algorithm has two steps:
/// calculating the forward and backward probability for each HMM state, then
/// determining the expected count of each transition-emission pair and using it
/// to re-estimate the model parameters.
/// </remarks>
/// <param name="observations">A set of observation sequences to train on.</param>
private void baum_welch(int[][] observations)
{
    if (observations == null)
    {
        throw new ArgumentNullException("observations");
    }

    int N = observations.Length; // number of training sequences

    // Per-sequence temporal state (alpha, beta, gamma, xi), kept so the
    // re-estimation step below can sum over all sequences.
    var multipleTempInstants = new TemporalState[N][];

    for (int k = 0; k < N; k++)
    {
        // 1. Calculating the forward probability and the
        //    backward probability for each HMM state.
        //    NOTE(review): InitializeObservation presumably allocates a fresh
        //    tempInstancts array per sequence; otherwise every entry of
        //    multipleTempInstants would alias the same array — verify.
        InitializeObservation(observations[k]);
        forward(observations[k]);
        backward(observations[k]);

        int T = observations[k].Length;

        // 2. Determining the frequency of the transition-emission pair values
        //    and dividing it by the probability of the entire string.

        // Calculate gamma values
        update(T);

        // Calculate xi for every transition step t -> t+1
        for (int t = 0; t < T - 1; t++)
        {
            tempInstancts[t].Xi = calcXi(t, observations[k][t + 1]);
        }
        multipleTempInstants[k] = tempInstancts;
    }

    // 3. Continue with parameter re-estimation

    // 3.1 Re-estimation of initial state probabilities:
    //     average of gamma(i) at t = 0 over all sequences.
    for (int i = 0; i < States; i++)
    {
        double sum = 0;
        for (int k = 0; k < N; k++)
        {
            sum += multipleTempInstants[k][0].Gamma[i];
        }
        Probabilities[i] = sum / N;
    }

    // 3.2 Re-estimation of transition probabilities:
    //     expected i->j transitions divided by expected departures from i.
    for (int i = 0; i < States; i++)
    {
        for (int j = 0; j < States; j++)
        {
            double den = 0, num = 0;
            for (int k = 0; k < N; k++)
            {
                int T = observations[k].Length;
                for (int t = 0; t < T - 1; t++)
                {
                    num += multipleTempInstants[k][t].Xi.getValue(i, j);
                    den += multipleTempInstants[k][t].Gamma[i];
                }
            }

            // Remove from the matrix if needed — the sparse representation
            // keeps only non-zero entries.
            if (den != 0)
            {
                double result = num / den;
                if (result != 0)
                {
                    Transitions.setValue(i, j, result);
                }
                else
                {
                    Transitions.Remove(i, j);
                }
            }
            else
            {
                Transitions.Remove(i, j);
            }
        }
    }

    // 3.3 Re-estimation of emission probabilities:
    //     expected emissions of symbol j from state i divided by total
    //     expected occupancy of state i.
    for (int i = 0; i < States; i++)
    {
        for (int j = 0; j < Symbols; j++)
        {
            double den = 0, num = 0;
            for (int k = 0; k < N; k++)
            {
                int T = observations[k].Length;
                for (int t = 0; t < T; t++)
                {
                    if (observations[k][t] == j)
                    {
                        num += multipleTempInstants[k][t].Gamma[i];
                    }
                }
                // NOTE(review): this denominator does not depend on j and is
                // recomputed Symbols times — it could be hoisted out of the j-loop.
                for (int t = 0; t < T; t++)
                {
                    den += multipleTempInstants[k][t].Gamma[i];
                }
            }

            // Remove from the matrix if needed
            if (num != 0)
            {
                Emissions.setValue(i, j, num / den);
            }
            else
            {
                Emissions.Remove(i, j);
            }
        }
    }
}
/// <summary>
/// Calculates the most likely sequence of hidden states that produced the given
/// observation sequence (the Viterbi path).
/// </summary>
/// <remarks>
/// Decoding problem. Given the HMM M = (A, B, pi) and the observation sequence
/// O = {o1, o2, ..., oK}, compute the most likely sequence of hidden states
/// that produced O, efficiently via dynamic programming.
/// </remarks>
/// <param name="observations">A sequence of observations.</param>
/// <param name="probability">The probability of the best state path.</param>
/// <returns>The sequence of states that most likely produced the sequence.</returns>
private int[] viterbi(int[] observations, out double probability)
{
    if (observations == null)
    {
        throw new ArgumentNullException("observations");
    }
    if (tempInstancts == null)
    {
        throw new ArgumentNullException("tempInstancts");
    }
    if (observations.Length == 0)
    {
        // Nothing to decode: empty path with zero probability.
        probability = 0.0;
        return new int[0];
    }

    int length = observations.Length;

    // Base case: delta_0(i) = pi_i * b_i(o_0).
    for (int state = 0; state < States; state++)
    {
        tempInstancts[0].Delta[state] = Probabilities[state] * Emissions.getValue(state, observations[0]);
    }

    // Induction: for each step keep the best-scoring predecessor per state.
    for (int time = 1; time < length; time++)
    {
        for (int state = 0; state < States; state++)
        {
            double bestWeight = 0.0;
            int bestPrev = 0;
            for (int prev = 0; prev < States; prev++)
            {
                double candidate = tempInstancts[time - 1].Delta[prev] * Transitions.getValue(prev, state);
                if (candidate > bestWeight)
                {
                    bestWeight = candidate;
                    bestPrev = prev;
                }
            }
            tempInstancts[time].Delta[state] = bestWeight * Emissions.getValue(state, observations[time]);
            tempInstancts[time].State[state] = bestPrev;
        }
    }

    // Termination: pick the best state at the final time step.
    double bestFinal = tempInstancts[length - 1].Delta[0];
    int lastState = 0;
    for (int state = 1; state < States; state++)
    {
        if (tempInstancts[length - 1].Delta[state] > bestFinal)
        {
            bestFinal = tempInstancts[length - 1].Delta[state];
            lastState = state;
        }
    }

    // Backtrack through the stored predecessor pointers.
    int[] path = new int[length];
    path[length - 1] = lastState;
    for (int time = length - 2; time >= 0; time--)
    {
        path[time] = tempInstancts[time + 1].State[path[time + 1]];
    }

    // Report the path probability through the out parameter.
    probability = bestFinal;
    return path;
}
/// <summary>
/// Forward algorithm with per-step scaling: alpha values at each time step are
/// normalized by their sum c(t), and the scale factors are stored so the
/// sequence probability can be reconstructed at the end.
/// </summary>
/// <param name="observations">A sequence of observations.</param>
/// <returns>The probability of the observation sequence.</returns>
private double forward(int[] observations)
{
    if (observations == null)
    {
        throw new ArgumentNullException("observations");
    }
    if (tempInstancts == null)
    {
        throw new ArgumentNullException("tempInstancts");
    }

    int length = observations.Length;

    // 1. Initialization: alpha_0(i) = pi_i * b_i(o_0), accumulating c(0).
    for (int state = 0; state < States; state++)
    {
        double alpha = Probabilities[state] * Emissions.getValue(state, observations[0]);
        tempInstancts[0].Alpha[state] = alpha;
        tempInstancts[0].c += alpha;
    }
    if (tempInstancts[0].c != 0) // scaling is skipped when the whole step is zero
    {
        for (int state = 0; state < States; state++)
        {
            tempInstancts[0].Alpha[state] /= tempInstancts[0].c;
        }
    }

    // 2. Induction over the remaining time steps.
    for (int time = 1; time < length; time++)
    {
        for (int state = 0; state < States; state++)
        {
            double incoming = 0.0;
            for (int prev = 0; prev < States; prev++)
            {
                incoming += tempInstancts[time - 1].Alpha[prev] * Transitions.getValue(prev, state);
            }
            double alpha = incoming * Emissions.getValue(state, observations[time]);
            tempInstancts[time].Alpha[state] = alpha;
            tempInstancts[time].c += alpha; // scaling coefficient for this step
        }
        if (tempInstancts[time].c != 0) // scaling
        {
            for (int state = 0; state < States; state++)
            {
                tempInstancts[time].Alpha[state] /= tempInstancts[time].c;
            }
        }
    }

    // 3. Termination: sum of the final scaled alphas...
    double scaledProbability = 0.0;
    for (int state = 0; state < Probabilities.Length; state++)
    {
        scaledProbability += tempInstancts[length - 1].Alpha[state];
    }

    // ...multiplied back by the product of all scale factors.
    double scale = 1.0;
    for (int time = 0; time < length; time++)
    {
        scale *= tempInstancts[time].c;
    }
    return scaledProbability * scale;
}
/// <summary>
/// Accumulates another emission record into this one, component-wise.
/// </summary>
/// <param name="e">The emission values to add onto the current totals.</param>
public void Add(Emissions e)
{
    // The three accumulations are independent of one another.
    TCS += e.TCS;
    TH += e.TH;
    EM += e.EM;
}
/// <summary>
/// Imports an ELCD (life-cycle database) export: deserializes process and flow
/// XML files from a user-chosen folder, rebuilds the local consumable/emission
/// tables from them, and persists everything back to the database.
/// NOTE(review): async void means callers cannot await this or observe its
/// exceptions — acceptable only if wired directly to a UI event handler;
/// otherwise consider returning Task.
/// </summary>
private async void ImportELCD()
{
    var folder = new FolderBrowserDialog();
    if (folder.ShowDialog() != DialogResult.OK)
    {
        return; // user cancelled the folder picker
    }
    Progress = 0;

    // The export layout must contain "processes" and "flows" sub-folders;
    // bail out silently when either is missing.
    var processes = new DirectoryInfo(folder.SelectedPath + "\\processes");
    if (!processes.Exists)
    {
        return;
    }
    var flowdic = new DirectoryInfo(folder.SelectedPath + "\\flows");
    if (!flowdic.Exists)
    {
        return;
    }

    // Three progress ticks per process file (deserialize + two passes below).
    MaxProgress = processes.EnumerateFiles("*.xml").Count() * 3;
    ProcessesList.Clear();

    var processSerializer = new XmlSerializer(typeof(ProcessDataSetType));
    var flowSerializer = new XmlSerializer(typeof(FlowDataSetType));
    // Captured dispatcher so worker threads can marshal progress/list updates
    // back to the UI thread.
    var dis = Dispatcher.CurrentDispatcher;
    var db = DatabaseConnection.GetModelContext();

    // Wipe the existing tables before re-import.
    // NOTE(review): the same three tables are deleted again after the task
    // completes (below); one of the two wipes is likely redundant — verify.
    db.ConsumablesEmission.DeleteAllOnSubmit(db.ConsumablesEmission);
    db.SubmitChanges();
    db.ConsumableBase.DeleteAllOnSubmit(db.ConsumableBase);
    db.Emissions.DeleteAllOnSubmit(db.Emissions);
    db.SubmitChanges();

    // Local working copies (empty after the wipe above) and the next free ids.
    var consumablesEmissionLocal = db.ConsumablesEmission.ToList();
    var emissionLocal = db.Emissions.ToList();
    var consumableLocal = db.ConsumableBase.ToList();
    int emissionIndex = emissionLocal.Any() ? emissionLocal.Max(t => t.Id) + 1 : 1;
    int consumableIndex = consumableLocal.Any() ? consumableLocal.Max(t => t.Id) + 1 : 1;

    var task = new Task(() =>
    {
        // Flow data sets keyed by their reference id, filled concurrently below.
        ConcurrentDictionary <string, FlowDataSetType> flows = new ConcurrentDictionary <string, FlowDataSetType>();

        // Deserialize every process file in parallel and collect every flow it references.
        Parallel.ForEach(processes.EnumerateFiles("*.xml"), (file) =>
        {
            var deserializedProcess = ProcessDataSetType(file, processSerializer);
            dis.Invoke(() =>
            {
                Progress += 1;
                ProcessesList.Add(deserializedProcess);
            });
            foreach (var exchangeType in deserializedProcess.exchanges.exchange)
            {
                // NOTE(review): ContainsKey + TryAdd is racy across threads but
                // harmless here — a losing TryAdd simply no-ops.
                if (!flows.ContainsKey(exchangeType.referenceToFlowDataSet.refObjectId))
                {
                    // uri.Remove(0, 8) strips a fixed-length prefix from the flow
                    // reference URI — presumably "../flows"; TODO confirm.
                    flows.TryAdd(exchangeType.referenceToFlowDataSet.refObjectId,
                                 FlowSetType(flowdic + exchangeType.referenceToFlowDataSet.uri.Remove(0, 8), flowSerializer));
                }
            }
        });

        // Create one emission record per distinct flow name.
        foreach (var flowDataSetType in flows)
        {
            Emissions emission = null;
            emission = new Emissions()
            {
                Name = flowDataSetType.Value.flowInformation.dataSetInformation.name.baseName
                       .FirstOrDefault().Value,
                Id = emissionIndex++,
                Unit = flowDataSetType.Value.flowInformation.dataSetInformation.sumFormula
            };
            if (!emissionLocal.AsParallel().Any(t => t.Name == emission.Name))
            {
                emissionLocal.Add(emission);
            }
        }

        // Build consumables and link them to emissions via the exchange amounts.
        foreach (var deserializedProcess in ProcessesList)
        {
            var name = deserializedProcess.processInformation.dataSetInformation.name.baseName.FirstOrDefault();
            string unit = "";
            try
            {
                unit = deserializedProcess.processInformation.dataSetInformation.name
                       .functionalUnitFlowProperties
                       .FirstOrDefault().Value;
            }
            catch
            {
                // NOTE(review): empty catch silently defaults unit to "" when the
                // functional-unit property chain is null — consider logging.
            }
            dis.Invoke(() => { Progress += 1; });

            ConsumableBase cons = null;
            {
                // Reuse an existing consumable with the same name, else create one.
                cons = consumableLocal.FirstOrDefault(t => name != null && t.Name == name.Value);
                if (cons == null)
                {
                    cons = new ConsumableBase()
                    {
                        Name = name == null ? "" : name.Value,
                        Unit = unit,
                        Id = consumableIndex++
                    };
                    consumableLocal.Add(cons);
                }
            }
            dis.Invoke(() => { Progress += 1; });

            foreach (var exchange in deserializedProcess.exchanges.exchange)
            {
                var emissionN = exchange.referenceToFlowDataSet.shortDescription.FirstOrDefault();
                if (emissionN != null)
                {
                    var emissionName = emissionN.Value;
                    // Look up the emission by name; create it on the fly if the
                    // flow pass above did not produce it.
                    Emissions emission = emissionLocal.AsParallel().FirstOrDefault(t => t.Name == emissionName);
                    if (emission == default(Emissions))
                    {
                        emission = new Emissions() { Name = emissionName, Id = emissionIndex++ };
                        emissionLocal.Add(emission);
                    }
                    // Link consumable and emission once, carrying the exchange amount.
                    if (
                        !consumablesEmissionLocal.AsParallel().Any(
                            t => t.Consumable == cons.Id && t.Emission == emission.Id))
                    {
                        consumablesEmissionLocal.Add(new ConsumablesEmission()
                        {
                            Consumable = cons.Id,
                            Emission = emission.Id,
                            Value = exchange.resultingAmount
                        });
                    }
                }
            }
        }
    });
    task.Start();
    await task;

    // Second wipe, then bulk-insert the freshly built local lists.
    db.ConsumablesEmission.DeleteAllOnSubmit(db.ConsumablesEmission);
    db.SubmitChanges();
    db.ConsumableBase.DeleteAllOnSubmit(db.ConsumableBase);
    db.Emissions.DeleteAllOnSubmit(db.Emissions);
    db.SubmitChanges();
    db.ConsumableBase.InsertAllOnSubmit(consumableLocal);
    db.Emissions.InsertAllOnSubmit(emissionLocal);
    db.SubmitChanges();
    db.ConsumablesEmission.InsertAllOnSubmit(consumablesEmissionLocal);
    db.SubmitChanges();
}