/// <summary>
/// Generates a transition matrix whose degree of organization matches the input
/// target (within 0.001) by bisecting between the two seed matrices PiA and PiB.
/// </summary>
/// <param name="inputParameters">Matrix configuration, including the target degree of organization.</param>
/// <param name="inputProductStructure">Product structure used to initialize PiB.</param>
/// <param name="rng">Random source used to initialize PiA.</param>
/// <returns>The converged transition matrix (PiA).</returns>
public TransitionMatrix GenerateTransitionMatrix(TransitionMatrixInput inputParameters, ProductStructureInput inputProductStructure, XRandom rng)
{
    // The extended variant carries one extra row/column and shifts column indexing by one.
    var size = inputParameters.WorkingStations.Count;
    var indexOffset = 0;
    if (inputParameters.ExtendedTransitionMatrix)
    {
        size += 1;
        indexOffset = 1;
    }

    _piA = new double[size, size];
    _piB = new double[size, size];
    InitializePiA(inputParameters, rng, size, indexOffset);
    InitializePiB(inputParameters, inputProductStructure, size, indexOffset);

    // Bisect until the achieved organization degree is within tolerance of the target.
    while (Math.Abs(_organizationDegreeA - inputParameters.DegreeOfOrganization) > 0.001)
    {
        Bisection(inputParameters, size);
    }

    return new TransitionMatrix { Pi = _piA };
}
/// <summary>
/// <para>Predicts the next model state from the current state and the optional
/// <paramref name="controlVector"/>, then updates the Kalman gain.</para>
/// <para>
/// x'(k) = A * x(k-1) + B * u(k).
/// P'(k) = A * P(k-1) * At + Q
/// K(k) = P'(k) * Ht * (H * P'(k) * Ht + R)^(-1)
/// </para>
/// </summary>
/// <param name="controlVector">Set of data for external system control; may be null when the model has no control input.</param>
public void Predict(double[] controlVector)
{
    // Validate matrix presence and dimensions before any computation.
    CheckPrerequisites();

    //x'(k) = A * x(k-1)
    state = TransitionMatrix.Dot(state);

    //x'(k) = x'(k) + B * u(k)
    if (controlVector is not null)
    {
        state = state.Add(ControlMatrix.Dot(controlVector));
    }

    //P'(k) = A * P(k-1) * At + Q
    EstimateCovariance = TransitionMatrix.Multiply(EstimateCovariance).Multiply(TransitionMatrix.Transpose()).Add(ProcessNoise);

    /******* calculate Kalman gain **********/
    var measurementMatrixTransposed = MeasurementMatrix.Transpose();

    //S(k) = H * P'(k) * Ht + R
    ResidualCovariance = MeasurementMatrix.Multiply(EstimateCovariance).Multiply(measurementMatrixTransposed).Add(MeasurementNoise);
    ResidualCovarianceInv = ResidualCovariance.Inverse();

    //K(k) = P'(k) * Ht * S(k)^(-1)
    KalmanGain = EstimateCovariance.Multiply(measurementMatrixTransposed).Multiply(ResidualCovarianceInv);
    /******* calculate Kalman gain **********/
}
// Render update for a transfer: visually frees the source heme, occupies the
// target heme, and mirrors both changes into the matrix's animation state.
public override void renderEvent(TransitionMatrix tm)
{
    from_heme.setOccupied(false);
    to_heme.setOccupied(true);
    // Keep the matrix's animation bookkeeping in sync with the visuals.
    tm.setAnimationState(from_hemeIdx, false);
    tm.setAnimationState(to_hemeIdx, true);
}
/*Event Handler*/
// Applies a transfer to the matrix's logical state (source empty, target occupied)
// and caches the heme objects involved so renderEvent can update their visuals.
public override void processEvent(TransitionMatrix tm)
{
    tm.setCurrentState(from_hemeIdx, false);
    tm.setCurrentState(to_hemeIdx, true);
    // Resolve the heme objects for the later render pass.
    from_heme = tm.getHemeObject(from_hemeIdx);
    to_heme = tm.getHemeObject(to_hemeIdx);
}
// Use this for initialization
// Unity entry point: loads heme locations, builds the transition matrix from its
// file, and runs the simulation loop on a background thread.
void Start()
{
    initHemes(location_filename);
    tm = new TransitionMatrix(tm_filename, my_babies.ToArray());
    // Fire-and-forget worker; a running thread keeps itself alive.
    new Thread(tm.run).Start();
}
/// <summary>
/// Builds per-station machining-time distributions and, for each matrix row, a
/// probability-sorted list of transition targets used for later sampling.
/// </summary>
private void Prepare(TransitionMatrix transitionMatrix, TransitionMatrixInput inputTransitionMatrix, XRandom rng)
{
    _matrixSize = inputTransitionMatrix.WorkingStations.Count;
    TruncatedDiscreteNormal unifyingDistribution = null;
    // May lowerBound (i.e. the minimum duration of an operation) be 0? -> if 0 occurs
    // rarely (e.g. the time unit is seconds rather than minutes), that is ok.
    if (inputTransitionMatrix.GeneralMachiningTimeParameterSet != null)
    {
        // A general parameter set overrides per-station settings: every station then
        // shares one truncated (non-negative) discrete normal distribution.
        var normalDistribution = Normal.WithMeanVariance(
            inputTransitionMatrix.GeneralMachiningTimeParameterSet.MeanMachiningTime,
            inputTransitionMatrix.GeneralMachiningTimeParameterSet.VarianceMachiningTime,
            rng.GetRng());
        unifyingDistribution = new TruncatedDiscreteNormal(0, null, normalDistribution);
    }
    var workingStations = inputTransitionMatrix.WorkingStations.ToArray();
    for (var i = 0; i < _matrixSize; i++)
    {
        TruncatedDiscreteNormal truncatedDiscreteNormalDistribution;
        if (unifyingDistribution != null)
        {
            truncatedDiscreteNormalDistribution = unifyingDistribution;
        }
        else
        {
            // Station-specific machining-time distribution.
            var machiningTime = workingStations[i].MachiningTimeParameterSet;
            var normalDistribution = Normal.WithMeanVariance(machiningTime.MeanMachiningTime, machiningTime.VarianceMachiningTime, rng.GetRng());
            truncatedDiscreteNormalDistribution = new TruncatedDiscreteNormal(0, null, normalDistribution);
        }
        _machiningTimeDistributions.Add(truncatedDiscreteNormalDistribution);
    }
    // The extended matrix carries one extra row/column.
    if (inputTransitionMatrix.ExtendedTransitionMatrix)
    {
        _matrixSize++;
    }
    for (var i = 0; i < _matrixSize; i++)
    {
        // Cache each row as (column index, probability) pairs sorted by descending
        // probability. The comparator never returns 0, so equal probabilities keep
        // an arbitrary relative order (List<T>.Sort is unstable anyway).
        var row = new List <KeyValuePair <int, double> >();
        _cumulatedProbabilities.Add(row);
        for (var j = 0; j < _matrixSize; j++)
        {
            row.Add(new KeyValuePair <int, double>(j, transitionMatrix.Pi[i, j]));
        }
        row.Sort(delegate(KeyValuePair <int, double> o1, KeyValuePair <int, double> o2)
        {
            if (o1.Value > o2.Value)
            {
                return(-1);
            }
            return(1);
        });
    }
}
// Constructing a TransitionMatrix from a null table must throw an
// ArgumentNullException that names the "transitionMatrix" parameter.
public void TestNullArgumentConstructor()
{
    // Execute & Validate
    Assert.Throws <ArgumentNullException>(
        "transitionMatrix",
        () => _ = new TransitionMatrix(null));
}
/// <summary>
/// Core state-machine logic: applies an action to the current state.
/// </summary>
/// <param name="action">Which action to apply to the current state.</param>
/// <returns>True if the action was applied; false if the action is not valid here.</returns>
internal bool GoNext(Action action)
{
    // Look up a transition matching both the current state and the requested action.
    var transition = TransitionMatrix.FirstOrDefault(o => o.CurrentState == CurrentState && o.Action == action);
    if (transition is null)
    {
        // No such transition: the action is invalid for the current state.
        return false;
    }

    CurrentState = transition.NewState;
    return true;
}
// The StateMachine constructor must reject a null transition-matrix argument
// with an ArgumentNullException naming the "matrixData" parameter.
public void TestConstructorNullArgument2()
{
    // Prepare datas
    setting_TransitionMatrixData = null;

    // Execute & Validate
    Assert.Throws <ArgumentNullException>(
        "matrixData",
        () => _ = new StateMachine(setting_InitialState, setting_TransitionMatrixData));
}
// Unity lifecycle hook (runs before Start): wires up the MIDI playback pipeline.
private void Awake()
{
    // Load the MIDI clip attached to this GameObject's MidiSource component.
    file = GetComponent <MidiSource>().MidiClip;
    midiFile = new cwMidi.MidiFile(file);
    midiTrack = new MidiTrack();
    midiOutputDevice = MidiPlayer.Start();
    // Diagnostic dump of the parsed file at high debug levels.
    if (Midi.debugLevel > 3) { midiFile.printCookedMidiFile(); }
    // Build the transition matrix from the file's first track.
    matrix = new TransitionMatrix(midiFile.getMidiTrack(0));
    midiSource = GetComponent <MidiSource>();
}
// Form load: builds a three-state demo machine over three shared triggers,
// wires it into MainStateMachine, and shows the initial state's name.
private void GeroMachineSampleForm_Load(object sender, EventArgs e)
{
    AllTriggers = new Trigger[] { new Trigger(), new Trigger(), new Trigger() };

    var all_states = new State[]
    {
        new NormalState(AllTriggers),
        new NormalState(AllTriggers),
        new NormalState(AllTriggers)
    };
    all_states[0].StateName = "State 1";
    all_states[1].StateName = "State 2";
    all_states[2].StateName = "State 3";

    // Per-state trigger -> transition table. State 3 deliberately has no
    // entry for the second trigger.
    var transitionTable = new Dictionary <State, Dictionary <Trigger, ITransition> >
    {
        {
            all_states[0], new Dictionary <Trigger, ITransition>
            {
                { AllTriggers[0], new Transition(all_states[1], null) },
                { AllTriggers[1], new Transition(all_states[2], null) },
            }
        },
        {
            all_states[1], new Dictionary <Trigger, ITransition>
            {
                { AllTriggers[0], new Transition(all_states[2], null) },
                { AllTriggers[1], new Transition(all_states[2], null) },
                { AllTriggers[2], new Transition(all_states[0], null) }
            }
        },
        {
            all_states[2], new Dictionary <Trigger, ITransition>
            {
                { AllTriggers[0], new Transition(all_states[2], null) },
                { AllTriggers[2], new Transition(all_states[0], null) }
            }
        }
    };

    var transitionMatrix = new TransitionMatrix(transitionTable);
    MainStateMachine = new StateMachine(all_states[0], transitionMatrix);
    CurrentStateNameLabel.Text = MainStateMachine.CurrentStateName;
}
/// <summary>
/// Rebuilds a transition matrix from the generated work plans and compares its
/// organization degree with that of the originally generated matrix.
/// </summary>
public void VerifyGeneratedData(TransitionMatrix transitionMatrix, List <Dictionary <long, Node> > nodesPerLevel, MasterTableResourceCapability capabilities)
{
    // +1 row/column beyond the capability count — presumably a start/end
    // pseudo-state of the extended matrix; TODO confirm against the generator.
    var actualTransitionMatrix = new TransitionMatrix
    {
        Pi = new double[capabilities.ParentCapabilities.Count + 1, capabilities.ParentCapabilities.Count + 1]
    };
    // Count observed transitions for every article above the lowest level.
    for (var i = 0; i < nodesPerLevel.Count - 1; i++)
    {
        foreach (var article in nodesPerLevel[i].Values)
        {
            var operationCount = 0;
            var lastCapPos = 0;
            do
            {
                var capPos = capabilities.ParentCapabilities.FindIndex(x => object.ReferenceEquals(x, article.Operations[operationCount].MOperation.ResourceCapability.ParentResourceCapability));
                actualTransitionMatrix.Pi[lastCapPos, capPos]++;
                // +1 shifts from column indexing to the next row's indexing.
                lastCapPos = capPos + 1;
                operationCount++;
            } while (operationCount < article.Operations.Count);
            // Transition from the last operation into the final column.
            actualTransitionMatrix.Pi[lastCapPos, capabilities.ParentCapabilities.Count]++;
        }
    }
    // Normalize each row of counts into probabilities.
    for (var i = 0; i <= capabilities.ParentCapabilities.Count; i++)
    {
        var sum = 0.0;
        for (var j = 0; j <= capabilities.ParentCapabilities.Count; j++)
        {
            sum += actualTransitionMatrix.Pi[i, j];
        }
        for (var j = 0; j <= capabilities.ParentCapabilities.Count; j++)
        {
            actualTransitionMatrix.Pi[i, j] /= sum;
        }
    }
    var transitionMatrixGenerator = new TransitionMatrixGenerator();
    ActualOrganizationDegree = transitionMatrixGenerator.CalcOrganizationDegree(actualTransitionMatrix.Pi, capabilities.ParentCapabilities.Count + 1);
    GeneratedOrganizationDegree = transitionMatrixGenerator.CalcOrganizationDegree(transitionMatrix.Pi, capabilities.ParentCapabilities.Count + 1);
    System.Diagnostics.Debug.WriteLine("################################# Generated work plans have an organization degree of " + ActualOrganizationDegree + " (transition matrix has " + GeneratedOrganizationDegree + ")");
}
/// <summary>
/// Demonstrates Viterbi decoding on the three-state example from the linked
/// MIT 16.410 lecture notes; expected decodings are noted inline.
/// </summary>
internal static void Run()
{
    // http://ocw.mit.edu/courses/aeronautics-and-astronautics/16-410-principles-of-autonomy-and-decision-making-fall-2010/lecture-notes/MIT16_410F10_lec21.pdf
    var states = new Registry <IState>();
    var x1 = states.Add(new NamedState("x1"));
    var x2 = states.Add(new NamedState("x2"));
    var x3 = states.Add(new NamedState("x3"));
    var observations = new Registry <IObservation>();
    var o2 = observations.Add(new NamedObservation("o2"));
    var o3 = observations.Add(new NamedObservation("o3"));
    // test the HMM with a known graph
    {
        // The chain starts deterministically in x1.
        var initial = new InitialStateMatrix(states);
        initial.SetProbability(x1, 1);
        initial.SetProbability(x2, 0);
        initial.SetProbability(x3, 0);
        // Transition probabilities; x3 is absorbing (x3 -> x3 with probability 1).
        var transitions = new TransitionMatrix(states);
        transitions.SetTransition(x1, x1, 0);
        transitions.SetTransition(x1, x2, 0.5);
        transitions.SetTransition(x1, x3, 0.5);
        transitions.SetTransition(x2, x1, 0);
        transitions.SetTransition(x2, x2, 0.9);
        transitions.SetTransition(x2, x3, 0.1);
        transitions.SetTransition(x3, x1, 0);
        transitions.SetTransition(x3, x2, 0);
        transitions.SetTransition(x3, x3, 1);
        // Emission probabilities for the two observable symbols.
        var emissions = new EmissionMatrix(states, observations);
        emissions.SetEmission(x1, o2, 0.5);
        emissions.SetEmission(x2, o2, 0.9);
        emissions.SetEmission(x3, o2, 0.1);
        emissions.SetEmission(x1, o3, 0.5);
        emissions.SetEmission(x2, o3, 0.1);
        emissions.SetEmission(x3, o3, 0.9);
        var hmm = new HiddenMarkovModel(states, initial, transitions, emissions);
        // expected output: 1, 3, 3, 3, 3, 3, 3, 3, 3
        hmm.ApplyViterbiAndPrint(new[] { o2, o3, o3, o2, o2, o2, o3, o2, o3 });
        // expected output: 1, 2, 2, 2, 2, 2, 2, 2
        hmm.ApplyViterbiAndPrint(new[] { o2, o3, o3, o2, o2, o2, o3, o2 });
    }
}
/// <summary>
/// Validates that all HMM inputs are present, dimensionally consistent, and that
/// every emission and transition row is approximately normalized (tolerance 0.01).
/// </summary>
private void ValidateParameters()
{
    if (EmissionMatrix == null || TransitionMatrix == null || SequenceOfObservations == null || StateSpace == null || ObservationSpace == null)
    {
        throw new ArgumentException("Parameters cannot be null");
    }
    // N = number of observation symbols; must match the emission matrix's columns.
    if (ObservationSpace.Length != EmissionMatrix.GetLength(1) || ObservationSpace.Length == 0)
    {
        throw new ArgumentException("N should be greater than 0 and consistent");
    }
#if _USE_ARRAYS_INSTEAD_OF_MATRIX_HASHTABLE
    // K = number of states: transition matrix is K x K, emission matrix has K rows,
    // one initial probability per state (array-backed variant: Length).
    if (StateSpace.Length != TransitionMatrix.GetLength(0) ||
        TransitionMatrix.GetLength(0) != TransitionMatrix.GetLength(1) ||
        TransitionMatrix.GetLength(1) != EmissionMatrix.GetLength(0) ||
        EmissionMatrix.GetLength(0) != InitialProbabilitiesOfStates.Length ||
        StateSpace.Length == 0)
    {
        throw new ArgumentException("K should be greater than 0 and consistent");
    }
#else
    // Same K-consistency check for the collection-backed variant (Count).
    if (StateSpace.Length != TransitionMatrix.GetLength(0) ||
        TransitionMatrix.GetLength(0) != TransitionMatrix.GetLength(1) ||
        TransitionMatrix.GetLength(1) != EmissionMatrix.GetLength(0) ||
        EmissionMatrix.GetLength(0) != InitialProbabilitiesOfStates.Count ||
        StateSpace.Length == 0)
    {
        throw new ArgumentException("K should be greater than 0 and consistent");
    }
#endif
    // T = length of the observation sequence.
    if (SequenceOfObservations.Length == 0)
    {
        throw new ArgumentException("T should be greater than 0 and consistent");
    }
    // Every emission row must sum to ~1.
    if (
        StateSpace.Select(state => ObservationSpace.Sum(observation => EmissionMatrix[state, observation]))
        .Any(sum => sum <= 0.99 || sum >= 1.01))
    {
        throw new ArgumentException("EmissionMatrix has not normalized probabilities");
    }
    // Every transition row must sum to ~1.
    if (
        StateSpace.Select(state1 => StateSpace.Sum(state2 => TransitionMatrix[state1, state2]))
        .Any(sum => sum <= 0.99 || sum >= 1.01))
    {
        throw new ArgumentException("TransitionMatrix has not normalized probabilities");
    }
}
// The StateMachine constructor must store the initial state and the transition
// matrix exactly as passed (verified via reflection on the private fields).
public void TestConstructor()
{
    // Prepare: fixture-provided inputs are what the constructor must keep.
    var expectedMatrixData = setting_TransitionMatrixData;
    var expectedInitialState = setting_InitialState;

    // Execute
    var stateMachine = new StateMachine(setting_InitialState, setting_TransitionMatrixData);

    // Get results: read both private fields back via reflection.
    const BindingFlags flags = BindingFlags.GetField | BindingFlags.NonPublic | BindingFlags.Instance;
    var machineType = stateMachine.GetType();
    var actualCurrentState = (State)machineType.GetField("CurrentState", flags).GetValue(stateMachine);
    var actualMatrixData = (TransitionMatrix)machineType.GetField("TransitionMatrixData", flags).GetValue(stateMachine);

    // Validate: the exact same instances were stored.
    Assert.Same(expectedInitialState, actualCurrentState);
    Assert.Same(expectedMatrixData, actualMatrixData);
}
/*Do whatever transfer, injection, or ejection just happened*/
// Implementations mutate the matrix's logical (current-state) bookkeeping for the event.
public abstract void processEvent(TransitionMatrix tm);
/// <summary>
/// Part-of-speech HMM example (adjective/noun over four words): first verifies a
/// hand-built model, then verifies the same probabilities after supervised learning.
/// </summary>
internal static void Run()
{
    // http://www.cs.sfu.ca/~anoop/teaching/CMPT-413-Spring-2014/index.html
    const double compareEpsilon = 0.000001D;
    var states = new Registry <IState>();
    var adjective = states.Add(new NamedState("A"));
    var noun = states.Add(new NamedState("N"));
    var observations = new Registry <IObservation>();
    var clown = observations.Add(new NamedObservation("clown"));
    var killer = observations.Add(new NamedObservation("killer"));
    var crazy = observations.Add(new NamedObservation("crazy"));
    var problem = observations.Add(new NamedObservation("problem"));
    // test the HMM with a known graph
    {
        var initial = new InitialStateMatrix(states);
        initial.SetProbability(adjective, 1 / 3D);
        initial.SetProbability(noun, 2 / 3D);
        // An adjective is always followed by a noun; a noun by either with equal chance.
        var transitions = new TransitionMatrix(states);
        transitions.SetTransition(adjective, adjective, 0);
        transitions.SetTransition(adjective, noun, 1);
        transitions.SetTransition(noun, adjective, 0.5);
        transitions.SetTransition(noun, noun, 0.5);
        // "crazy" is the only adjective; the other three words are nouns.
        var emissions = new EmissionMatrix(states, observations);
        emissions.SetEmission(adjective, clown, 0);
        emissions.SetEmission(adjective, killer, 0);
        emissions.SetEmission(adjective, problem, 0);
        emissions.SetEmission(adjective, crazy, 1);
        emissions.SetEmission(noun, clown, 0.4);
        emissions.SetEmission(noun, killer, 0.3);
        emissions.SetEmission(noun, problem, 0.3);
        emissions.SetEmission(noun, crazy, 0);
        var hmm = new HiddenMarkovModel(states, initial, transitions, emissions);
        // P(AA | killer clown)
        var paa = hmm.GetProbability(killer.As(adjective), clown.As(adjective));
        Debug.Assert(Math.Abs(paa) < compareEpsilon);
        // P(AN | killer clown)
        var pan = hmm.GetProbability(killer.As(adjective), clown.As(noun));
        Debug.Assert(Math.Abs(pan) < compareEpsilon);
        // P(NN | killer clown)
        var pnn = hmm.GetProbability(killer.As(noun), clown.As(noun));
        Debug.Assert(Math.Abs(0.04 - pnn) < compareEpsilon);
        // P(NA | killer clown)
        var pna = hmm.GetProbability(killer.As(noun), clown.As(adjective));
        Debug.Assert(Math.Abs(pna) < compareEpsilon);
    }
    // test supervised learning of the HMM
    {
        // Labeled training sequences (word tagged with its state).
        var trainingSet = new List <IList <LabeledObservation> >
        {
            new[] { killer.As(noun), clown.As(noun) },
            new[] { killer.As(noun), problem.As(noun) },
            new[] { crazy.As(adjective), problem.As(noun) },
            new[] { crazy.As(adjective), clown.As(noun) },
            new[] { problem.As(noun), crazy.As(adjective), clown.As(noun) },
            new[] { clown.As(noun), crazy.As(adjective), killer.As(noun) },
        };
        // prepare the matrices
        var initial = new InitialStateMatrix(states);
        var transitions = new TransitionMatrix(states);
        var emissions = new EmissionMatrix(states, observations);
        // learn the probabilities
        var learner = new ClassicalBaumWelchLearning();
        learner.Learn(initial, transitions, emissions, trainingSet);
        var hmm = new HiddenMarkovModel(states, initial, transitions, emissions);
        // The learned model must reproduce the same pairwise probabilities as above.
        // P(AA | killer clown)
        var paa = hmm.GetProbability(killer.As(adjective), clown.As(adjective));
        Debug.Assert(Math.Abs(paa) < compareEpsilon);
        // P(AN | killer clown)
        var pan = hmm.GetProbability(killer.As(adjective), clown.As(noun));
        Debug.Assert(Math.Abs(pan) < compareEpsilon);
        // P(NN | killer clown)
        var pnn = hmm.GetProbability(killer.As(noun), clown.As(noun));
        Debug.Assert(Math.Abs(0.04 - pnn) < compareEpsilon);
        // P(NA | killer clown)
        var pna = hmm.GetProbability(killer.As(noun), clown.As(adjective));
        Debug.Assert(Math.Abs(pna) < compareEpsilon);
        // apply the viterbi algorithm to find the most likely sequence
        hmm.ApplyViterbiAndPrint(new[] { killer, crazy, clown, problem });
        hmm.ApplyViterbiAndPrint(new[] { crazy, killer, clown, problem });
        hmm.ApplyViterbiAndPrint(new[] { crazy, clown, killer, crazy, problem });
        var p = hmm.Evaluate(new[] { killer, crazy, clown, problem });
    }
}
/// <summary>
/// Runs the full data-generation pipeline for one approach: recreates the database,
/// then generates product structure, transition matrix, operations, BOMs and master
/// data, optionally verifying the generated data afterwards.
/// </summary>
public void StartGeneration(Approach approach, MasterDBContext dbContext, ResultContext resultContext, bool doVerify = false)
{
    // Start from a clean database.
    dbContext.Database.EnsureDeleted();
    dbContext.Database.EnsureCreated();
    // A single seeded RNG drives the whole run, making generation reproducible.
    var rng = new XRandom(approach.Seed);
    var units = new MasterTableUnit();
    var unitCol = units.Init(dbContext);
    var articleTypes = new MasterTableArticleType();
    articleTypes.Init(dbContext);
    var productStructureGenerator = new ProductStructureGenerator();
    var productStructure = productStructureGenerator.GenerateProductStructure(approach.ProductStructureInput, articleTypes, units, unitCol, rng);
    ArticleInitializer.Init(productStructure.NodesPerLevel, dbContext);
    var articleTable = dbContext.Articles.ToArray();
    MasterTableStock.Init(dbContext, articleTable);
    var transitionMatrixGenerator = new TransitionMatrixGenerator();
    TransitionMatrix = transitionMatrixGenerator.GenerateTransitionMatrix(approach.TransitionMatrixInput, approach.ProductStructureInput, rng);
    List <ResourceProperty> resourceProperties = approach.TransitionMatrixInput.WorkingStations
        .Select(x => (ResourceProperty)x).ToList();
    var resourceCapabilities = ResourceInitializer.Initialize(dbContext, resourceProperties);
    var operationGenerator = new OperationGenerator();
    operationGenerator.GenerateOperations(productStructure.NodesPerLevel, TransitionMatrix, approach.TransitionMatrixInput, resourceCapabilities, rng);
    OperationInitializer.Init(productStructure.NodesPerLevel, dbContext);
    var billOfMaterialGenerator = new BillOfMaterialGenerator();
    billOfMaterialGenerator.GenerateBillOfMaterial(approach.BomInput, productStructure.NodesPerLevel, TransitionMatrix, units, rng);
    BillOfMaterialInitializer.Init(productStructure.NodesPerLevel, dbContext);
    var businessPartner = new MasterTableBusinessPartner();
    businessPartner.Init(dbContext);
    var articleToBusinessPartner = new ArticleToBusinessPartnerInitializer();
    articleToBusinessPartner.Init(dbContext, articleTable, businessPartner);
    if (doVerify)
    {
        var productStructureVerifier = new ProductStructureVerifier();
        productStructureVerifier.VerifyComplexityAndReutilizationRation(approach.ProductStructureInput, productStructure);
        // The transition-matrix verification only applies to the extended variant.
        if (approach.TransitionMatrixInput.ExtendedTransitionMatrix)
        {
            var transitionMatrixGeneratorVerifier = new TransitionMatrixGeneratorVerifier();
            transitionMatrixGeneratorVerifier.VerifyGeneratedData(TransitionMatrix, productStructure.NodesPerLevel, resourceCapabilities);
        }
    }
    //##### TEMP
    // Diagnostic: recompute a complexity ratio from the incoming-edge counts.
    var incomingEdgeCount = 0;
    foreach (var level in productStructure.NodesPerLevel)
    {
        foreach (var node in level)
        {
            incomingEdgeCount += node.Value.IncomingEdges.Count;
        }
    }
    // NOTE(review): this method appears truncated at the end of this chunk.
    var actualCR = incomingEdgeCount / (1.0 * (productStructure.NodesCounter - productStructure.NodesPerLevel[^ 1].Count));
// Test fixture setup: builds three states over overlapping trigger subsets and a
// fully-populated transition matrix shared by the tests of this class.
public CreationTest()
{
    // Prepare datas
    Trigger[] all_triggers = new Trigger[5] { new Trigger(), new Trigger(), new Trigger(), new Trigger(), new Trigger() };
    // Each state accepts a different, overlapping subset of the five triggers.
    Trigger[] TriggerSet1 = new Trigger[3] { all_triggers[0], all_triggers[1], all_triggers[2] };
    Trigger[] TriggerSet2 = new Trigger[3] { all_triggers[0], all_triggers[2], all_triggers[4] };
    Trigger[] TriggerSet3 = new Trigger[3] { all_triggers[2], all_triggers[3], all_triggers[4] };
    State[] all_states = new State[3] { new NormalState(TriggerSet1), new NormalState(TriggerSet2), new NormalState(TriggerSet3) };
    // State -> (trigger -> transition) table covering every trigger of every state.
    var MatrixData = new Dictionary <State, Dictionary <Trigger, ITransition> >()
    {
        {
            all_states[0], new Dictionary <Trigger, ITransition>()
            {
                { TriggerSet1[0], new Transition(all_states[2], null) },
                { TriggerSet1[1], new Transition(all_states[2], null) },
                { TriggerSet1[2], new Transition(all_states[1], null) }
            }
        },
        {
            all_states[1], new Dictionary <Trigger, ITransition>()
            {
                { TriggerSet2[0], new Transition(all_states[0], null) },
                { TriggerSet2[1], new Transition(all_states[1], null) },
                { TriggerSet2[2], new Transition(all_states[2], null) }
            }
        },
        {
            all_states[2], new Dictionary <Trigger, ITransition>()
            {
                { TriggerSet3[0], new Transition(all_states[2], null) },
                { TriggerSet3[1], new Transition(all_states[2], null) },
                { TriggerSet3[2], new Transition(all_states[0], null) }
            }
        }
    };
    setting_TransitionMatrixData = new TransitionMatrix(MatrixData);
    setting_InitialState = all_states[0];
}
// Test fixture setup: builds three states over overlapping trigger subsets, keeps
// the raw matrix data in setting_MatrixData, and creates the matrix under test.
public RegularInstanceTest()
{
    All_Triggers = new Trigger[5] { new Trigger(), new Trigger(), new Trigger(), new Trigger(), new Trigger() };
    // Each state accepts a different, overlapping subset of the five triggers.
    Trigger[] TriggerSet1 = new Trigger[3] { All_Triggers[0], All_Triggers[1], All_Triggers[2] };
    Trigger[] TriggerSet2 = new Trigger[3] { All_Triggers[0], All_Triggers[2], All_Triggers[4] };
    Trigger[] TriggerSet3 = new Trigger[3] { All_Triggers[2], All_Triggers[3], All_Triggers[4] };
    All_States = new State[3] { new NormalState(TriggerSet1), new NormalState(TriggerSet2), new NormalState(TriggerSet3) };
    // State -> (trigger -> transition) table covering every trigger of every state.
    setting_MatrixData = new Dictionary <State, Dictionary <Trigger, ITransition> >()
    {
        {
            All_States[0], new Dictionary <Trigger, ITransition>()
            {
                { TriggerSet1[0], new Transition(All_States[2], null) },
                { TriggerSet1[1], new Transition(All_States[2], null) },
                { TriggerSet1[2], new Transition(All_States[1], null) }
            }
        },
        {
            All_States[1], new Dictionary <Trigger, ITransition>()
            {
                { TriggerSet2[0], new Transition(All_States[0], null) },
                { TriggerSet2[1], new Transition(All_States[1], null) },
                { TriggerSet2[2], new Transition(All_States[2], null) }
            }
        },
        {
            All_States[2], new Dictionary <Trigger, ITransition>()
            {
                { TriggerSet3[0], new Transition(All_States[2], null) },
                { TriggerSet3[1], new Transition(All_States[2], null) },
                { TriggerSet3[2], new Transition(All_States[0], null) }
            }
        }
    };
    TransitionMatrixInstance = new TransitionMatrix(setting_MatrixData);
}
// The TransitionMatrix constructor must store the passed dictionary instance
// verbatim (same reference, same content).
public void TestConstructor()
{
    // Prepare datas
    Trigger[] all_triggers = new Trigger[5] { new Trigger(), new Trigger(), new Trigger(), new Trigger(), new Trigger() };
    // Each state accepts a different, overlapping subset of the five triggers.
    TriggerSet1 = new Trigger[3] { all_triggers[0], all_triggers[1], all_triggers[2] };
    TriggerSet2 = new Trigger[3] { all_triggers[0], all_triggers[2], all_triggers[4] };
    TriggerSet3 = new Trigger[3] { all_triggers[2], all_triggers[3], all_triggers[4] };
    State[] states = new State[3] { new NormalState(TriggerSet1), new NormalState(TriggerSet2), new NormalState(TriggerSet3) };
    var input_transitionMatrix = new Dictionary <State, Dictionary <Trigger, ITransition> >()
    {
        {
            states[0], new Dictionary <Trigger, ITransition>()
            {
                { TriggerSet1[0], new Transition(states[2], null) },
                { TriggerSet1[1], new Transition(states[2], null) },
                { TriggerSet1[2], new Transition(states[1], null) }
            }
        },
        {
            states[1], new Dictionary <Trigger, ITransition>()
            {
                { TriggerSet2[0], new Transition(states[0], null) },
                { TriggerSet2[1], new Transition(states[1], null) },
                { TriggerSet2[2], new Transition(states[2], null) }
            }
        },
        {
            states[2], new Dictionary <Trigger, ITransition>()
            {
                { TriggerSet3[0], new Transition(states[2], null) },
                { TriggerSet3[1], new Transition(states[2], null) },
                { TriggerSet3[2], new Transition(states[0], null) }
            }
        }
    };
    // Shallow snapshot used for the content-equality assertion below.
    var expected_MatrixData = new Dictionary <State, Dictionary <Trigger, ITransition> >(input_transitionMatrix);

    // Execute
    TransitionMatrix transition_matrix = new TransitionMatrix(input_transitionMatrix);

    // Get result: read the private MatrixData field back via reflection.
    FieldInfo field_info = transition_matrix.GetType().GetField("MatrixData", BindingFlags.GetField | BindingFlags.NonPublic | BindingFlags.Instance);
    var actual_MatrixData = (Dictionary <State, Dictionary <Trigger, ITransition> >)field_info.GetValue(transition_matrix);

    // Validate: same reference stored, and content matches the snapshot.
    Assert.Same(input_transitionMatrix, actual_MatrixData);
    Assert.Equal(expected_MatrixData, actual_MatrixData);
}
// This event type has no visual effect; intentionally a no-op.
public override void renderEvent(TransitionMatrix tm) { }
/// <summary>
/// Checks pre-conditions: presence and dimensions of all filter matrices against
/// the configured state/control/measurement vector dimensions.
/// </summary>
private void CheckPrerequisites()
{
    /************************** TRANSITION MATRIX ***************************/
    if (TransitionMatrix is null)
    {
        throw new Exception("Transition matrix cannot be null!");
    }
    // A must be StateVectorDimension x StateVectorDimension.
    if (TransitionMatrix.GetLength(0) != StateVectorDimension || TransitionMatrix.GetLength(1) != StateVectorDimension)
    {
        throw new Exception("Transition matrix dimensions are not valid!");
    }
    /************************** TRANSITION MATRIX ***************************/

    /************************** CONTROL MATRIX ***************************/
    // The control matrix is optional, but only when there is no control input.
    if (ControlMatrix is null && ControlVectorDimension != 0)
    {
        throw new Exception("Control matrix can be null only if control vector dimension is set to 0!");
    }
    if (ControlMatrix is not null && (ControlMatrix.GetLength(0) != StateVectorDimension || ControlMatrix.GetLength(1) != ControlVectorDimension))
    {
        throw new Exception("Control matrix dimensions are not valid!");
    }
    /************************** CONTROL MATRIX ***************************/

    /************************** MEASUREMENT MATRIX ***************************/
    if (MeasurementMatrix is null)
    {
        throw new Exception("Measurement matrix cannot be null!");
    }
    if (MeasurementMatrix.GetLength(0) != MeasurementVectorDimension || MeasurementMatrix.GetLength(1) != StateVectorDimension)
    {
        // Message typo fixed ("dimesnions" -> "dimensions").
        throw new Exception("Measurement matrix dimensions are not valid!");
    }
    /************************** MEASUREMENT MATRIX ***************************/

    /************************** PROCESS NOISE COV. MATRIX ***************************/
    if (ProcessNoise is null)
    {
        throw new Exception("Process noise covariance matrix cannot be null!");
    }
    if (ProcessNoise.GetLength(0) != StateVectorDimension || ProcessNoise.GetLength(1) != StateVectorDimension)
    {
        throw new Exception("Process noise covariance matrix dimensions are not valid!");
    }
    /************************** PROCESS NOISE COV. MATRIX ***************************/

    /************************** MEASUREMENT NOISE COV. MATRIX ***************************/
    if (MeasurementNoise is null)
    {
        throw new Exception("Measurement noise covariance matrix cannot be null!");
    }
    if (MeasurementNoise.GetLength(0) != MeasurementVectorDimension || MeasurementNoise.GetLength(1) != MeasurementVectorDimension)
    {
        throw new Exception("Measurement noise covariance matrix dimensions are not valid!");
    }
    /************************** MEASUREMENT NOISE COV. MATRIX ***************************/
}
// Render update: visually frees the heme and clears its animation flag
// (presumably the ejection event — confirm against the event subclasses).
public override void renderEvent(TransitionMatrix tm)
{
    heme.setOccupied(false);
    tm.setAnimationState(hemeIdx, false);
}
/*Event Handler*/
// Marks the heme as logically unoccupied and caches the heme object for the render pass.
public override void processEvent(TransitionMatrix tm)
{
    tm.setCurrentState(hemeIdx, false);
    heme = tm.getHemeObject(hemeIdx);
}
/*Event Handler*/
public override void processEvent(TransitionMatrix tm)
{
    /*update the TM, then prep the actual Heme for rendering*/
    // Marks the heme as logically occupied and caches the heme object for the render pass.
    tm.setCurrentState(hemeIdx, true);
    heme = tm.getHemeObject(hemeIdx);
}
// This event type needs no logical state change; intentionally a no-op.
public override void processEvent(TransitionMatrix tm) { }
/// <summary>
/// Rebuilds a transition matrix from the articles actually executed in a finished
/// simulation run (weighted by usage count) and compares its organization degree
/// against the generated matrix and the configured target.
/// </summary>
public void VerifySimulatedData(MasterDBContext dbContext, DataGeneratorContext dbGeneratorCtx, ResultContext dbResultCtx, int simNumber)
{
    var simulation = SimulationRepository.GetSimulationById(simNumber, dbGeneratorCtx);
    if (simulation != null)
    {
        var approach = ApproachRepository.GetApproachById(dbGeneratorCtx, simulation.ApproachId);
        // Verification only applies to the extended transition matrix variant.
        if (approach.TransitionMatrixInput.ExtendedTransitionMatrix)
        {
            // Re-run generation so the generated matrix is available for comparison.
            var generator = new MainGenerator();
            generator.StartGeneration(approach, dbContext, dbResultCtx);
            var articleCount = ArticleRepository.GetArticleNamesAndCountForEachUsedArticleInSimulation(dbResultCtx, simNumber);
            var articlesByNames = ArticleRepository.GetArticlesByNames(articleCount.Keys.ToHashSet(), dbContext);
            var capabilities = ResourceCapabilityRepository.GetParentResourceCapabilities(dbContext);
            // +1 row/column beyond the capability count (matches the extended matrix layout).
            var actualTransitionMatrix = new TransitionMatrix
            {
                Pi = new double[capabilities.Count + 1, capabilities.Count + 1]
            };
            // Map capability id -> matrix position via the alphabetic prefix of its name.
            var capPosByCapId = new Dictionary <int, int>();
            foreach (var cap in capabilities)
            {
                var number = cap.Name.Substring(0, cap.Name.IndexOf(" "));
                var pos = AlphabeticNumbering.GetNumericRepresentation(number);
                capPosByCapId.Add(cap.Id, pos);
            }
            // Count transitions along each article's operation sequence (ordered by
            // hierarchy number), weighted by how often the article was used.
            foreach (var a in articlesByNames)
            {
                var operations = a.Value.Operations.ToList();
                operations.Sort((o1, o2) => o1.HierarchyNumber.CompareTo(o2.HierarchyNumber));
                var operationCount = 0;
                var lastCapPos = 0;
                do
                {
                    var capPos = capPosByCapId[operations[operationCount].ResourceCapability.ParentResourceCapability.Id];
                    actualTransitionMatrix.Pi[lastCapPos, capPos] += articleCount[a.Key];
                    // +1 shifts from column indexing to the next row's indexing.
                    lastCapPos = capPos + 1;
                    operationCount++;
                } while (operationCount < operations.Count);
                // Final transition into the last column.
                actualTransitionMatrix.Pi[lastCapPos, capabilities.Count] += articleCount[a.Key];
            }
            // Normalize counted rows into probabilities.
            for (var i = 0; i <= capabilities.Count; i++)
            {
                var sum = 0.0;
                for (var j = 0; j <= capabilities.Count; j++)
                {
                    sum += actualTransitionMatrix.Pi[i, j];
                }
                for (var j = 0; j <= capabilities.Count; j++)
                {
                    actualTransitionMatrix.Pi[i, j] /= sum;
                }
            }
            var transitionMatrixGenerator = new TransitionMatrixGenerator();
            ActualOrganizationDegree = transitionMatrixGenerator.CalcOrganizationDegree(
                actualTransitionMatrix.Pi, capabilities.Count + 1);
            GeneratedOrganizationDegree = transitionMatrixGenerator.CalcOrganizationDegree(
                generator.TransitionMatrix.Pi, capabilities.Count + 1);
            System.Diagnostics.Debug.WriteLine("################################# Executed work plans have an organization degree of " + ActualOrganizationDegree + " (transition matrix has " + GeneratedOrganizationDegree + "; input was " + approach.TransitionMatrixInput.DegreeOfOrganization + ")");
        }
    }
}
/// <summary>
/// Randomly distributes each article's incoming material edges across its
/// operations and persists the result as bill-of-material rows per operation.
/// </summary>
public void GenerateBillOfMaterial(BillOfMaterialInput inputParameters, List <Dictionary <long, Node> > nodesPerLevel, TransitionMatrix transitionMatrix, MasterTableUnit units, XRandom rng)
{
    // The lowest level is skipped — those nodes have no incoming edges to allocate.
    for (var k = 0; k < nodesPerLevel.Count - 1; k++)
    {
        foreach (var article in nodesPerLevel[k].Values)
        {
            // One (initially empty) material bucket per operation.
            List <List <Edge> > incomingMaterialAllocation = new List <List <Edge> >();
            foreach (var operation in article.Operations)
            {
                incomingMaterialAllocation.Add(new List <Edge>());
            }
            // Randomly assign each incoming material edge to one operation bucket.
            foreach (var edge in article.IncomingEdges)
            {
                var operationNumber = rng.Next(incomingMaterialAllocation.Count);
                incomingMaterialAllocation[operationNumber].Add(edge);
            }
            // Pick a random non-empty bucket and move it to the front, so the first
            // operation is guaranteed to consume material.
            List <List <Edge> > possibleSetsForFirstOperation = incomingMaterialAllocation.FindAll(x => x.Count > 0);
            var randomSet = rng.Next(possibleSetsForFirstOperation.Count);
            List <Edge> firstOperation = possibleSetsForFirstOperation[randomSet];
            List <List <Edge> > bom = new List <List <Edge> >();
            incomingMaterialAllocation.Remove(firstOperation);
            bom.Add(firstOperation);
            bom.AddRange(incomingMaterialAllocation);
            // Bucket i feeds operation i (after the reordering above).
            for (var i = 0; i < bom.Count; i++)
            {
                for (var j = 0; j < bom[i].Count; j++)
                {
                    var name = "[" + bom[i][j].Start.Article.Name + "] in (" + article.Operations[i].MOperation.Name + ")";
                    var weight = (decimal)bom[i][j].Weight;
                    // Piece-counted articles (or everything, if rounding is requested)
                    // get whole quantities of at least 1; otherwise a lower epsilon bound.
                    if (inputParameters.RoundEdgeWeight || bom[i][j].Start.Article.UnitId == units.PIECES.Id)
                    {
                        weight = Math.Max(1, Decimal.Round(weight));
                    }
                    else
                    {
                        weight = Math.Max(inputParameters.WeightEpsilon, weight);
                    }
                    var articleBom = new M_ArticleBom()
                    {
                        ArticleChildId = bom[i][j].Start.Article.Id,
                        Name = name,
                        Quantity = weight,
                        ArticleParentId = article.Article.Id,
                        OperationId = article.Operations[i].MOperation.Id
                    };
                    article.Operations[i].Bom.Add(articleBom);
                }
            }
        }
    }
}
// Implementations update the visual/animation state for the event that was just processed.
public abstract void renderEvent(TransitionMatrix tm);