public TransitionMatrix GenerateTransitionMatrix(TransitionMatrixInput inputParameters, ProductStructureInput inputProductStructure, XRandom rng)
{
    // The transition matrix is square with one row/column per working station;
    // the extended form adds one extra row/column (and shifts columns by one).
    var size = inputParameters.WorkingStations.Count;
    var columnShift = 0;
    if (inputParameters.ExtendedTransitionMatrix)
    {
        size += 1;
        columnShift = 1;
    }

    _piA = new double[size, size];
    _piB = new double[size, size];
    InitializePiA(inputParameters, rng, size, columnShift);
    InitializePiB(inputParameters, inputProductStructure, size, columnShift);

    // Bisect between piA and piB until the achieved degree of organization
    // lies within the 0.001 tolerance of the requested one.
    while (Math.Abs(_organizationDegreeA - inputParameters.DegreeOfOrganization) > 0.001)
    {
        Bisection(inputParameters, size);
    }

    return new TransitionMatrix { Pi = _piA };
}
private static void DeterminationOfEdgeWeights(ProductStructureInput inputParameters, ProductStructure productStructure, XRandom rng)
{
    // Edge weights (incoming material amounts) are drawn from a log-normal
    // distribution parameterized by the configured mean and variance (stddev squared).
    var weightDistribution = LogNormal.WithMeanVariance(
        inputParameters.MeanIncomingMaterialAmount,
        Math.Pow(inputParameters.StdDevIncomingMaterialAmount, 2),
        rng.GetRng());

    foreach (var edge in productStructure.Edges)
    {
        edge.Weight = weightDistribution.Sample();
    }
}
public void VerifyComplexityAndReutilizationRation(ProductStructureInput input, ProductStructure productStructure)
{
    // Nodes on the deepest assembly level (purchased material) have no incoming
    // edges and are therefore excluded from the complexity-ratio denominator.
    var deepestLevelNodeCount = productStructure.NodesPerLevel[input.DepthOfAssembly - 1].LongCount();
    var achievedComplexityRatio = productStructure.Edges.Count / (double)(productStructure.NodesCounter - deepestLevelNodeCount);
    // End products have no outgoing edges and are excluded from the reutilization denominator.
    var achievedReutilizationRatio = productStructure.Edges.Count / (double)(productStructure.NodesCounter - input.EndProductCount);

    // Report achieved vs. requested ratios for manual verification of the generator.
    System.Diagnostics.Debug.WriteLine($"################################# Generated product structure has a complexity ratio of {achievedComplexityRatio} (input was {input.ComplexityRatio})");
    System.Diagnostics.Debug.WriteLine($"################################# Generated product structure has a reutilization ratio of {achievedReutilizationRatio} (input was {input.ReutilisationRatio})");
}
// Creates all nodes (articles) for assembly level i and returns how many were created.
// Level 1 = end products, level DepthOfAssembly = purchased material, in between = assemblies.
//
// NOTE (translated from German): the SYMTEP algorithm had a problem — for unfavorable
// input parameters some assembly levels got no parts (0 nodes). The constraint that
// every level must have at least one part was apparently missing, so it was added
// here (hence the Math.Max(1, ...)).
private static long GeneratePartsForEachLevel(ProductStructureInput inputParameters, ProductStructure productStructure, List<HashSet<long>> availableNodes, MasterTableArticleType articleTypes, MasterTableUnit units, M_Unit[] unitCol, XRandom rng, int i, bool sampleWorkPlanLength, TruncatedDiscreteNormal truncatedDiscreteNormalDistribution)
{
    // Node count grows geometrically per level with the ratio of complexity to
    // reutilisation; clamped to at least 1 node per level (see note above).
    var nodeCount = Math.Max(1, Convert.ToInt64(Math.Round(
        Math.Pow(inputParameters.ComplexityRatio / inputParameters.ReutilisationRatio, i - 1) * inputParameters.EndProductCount)));
    var nodesCurrentLevel = new Dictionary<long, Node>();
    productStructure.NodesPerLevel.Add(nodesCurrentLevel);
    // Tracks which nodes of this level are still available during edge generation.
    var availableNodesOnThisLevel = new HashSet<long>();
    availableNodes.Add(availableNodesOnThisLevel);
    bool toPurchase, toBuild;
    M_Unit unit = null;
    M_ArticleType articleType;
    if (i == 1)
    {
        // Top level: end products, produced in-house, counted in pieces.
        toPurchase = false;
        toBuild = true;
        unit = units.PIECES;
        articleType = articleTypes.PRODUCT;
    }
    else if (i == inputParameters.DepthOfAssembly)
    {
        // Deepest level: purchased material. No unit is set here because
        // GeneratePartsForCurrentLevel samples a random unit per node on this level.
        toPurchase = true;
        toBuild = false;
        articleType = articleTypes.MATERIAL;
    }
    else
    {
        // Intermediate levels: assemblies, produced in-house, counted in pieces.
        toPurchase = false;
        toBuild = true;
        unit = units.PIECES;
        articleType = articleTypes.ASSEMBLY;
    }
    for (long j = 0; j < nodeCount; j++)
    {
        // The helper returns the unit it used; feeding it back in keeps the last
        // sampled unit as the starting value for the next iteration.
        unit = GeneratePartsForCurrentLevel(inputParameters, unitCol, rng, i, sampleWorkPlanLength, truncatedDiscreteNormalDistribution, availableNodesOnThisLevel, j, unit, articleType, toPurchase, toBuild, nodesCurrentLevel);
    }
    return(nodeCount);
}
// Creates the first (mandatory) set of edges for a diverging material flow
// (chosen when ReutilisationRatio >= ComplexityRatio) so that every node gets
// connected. Phase 1 works bottom-up and gives each node an end node one level
// above; phase 2 connects the nodes left over from phase 1 to a start node on a
// deeper level sampled from the probabilities pk.
private void GenerateFirstSetOfEdgesForDivergingMaterialFlow(ProductStructureInput inputParameters, ProductStructure productStructure, XRandom rng, List<HashSet<long>> availableNodes)
{
    // Phase 1: from the deepest level (purchased material) up to level 2.
    for (var i = inputParameters.DepthOfAssembly; i >= 2; i--)
    {
        for (long j = 1; j <= productStructure.NodesPerLevel[i - 1].LongCount() /*&& availableNodes[i - 2].LongCount() > 0*/; j++)
        {
            // Pick a random still-available end node on the next-higher level (i-1).
            // NOTE(review): rng.NextLong(0) would be hit if the pool above runs
            // empty — presumably the node counts per level prevent that; verify.
            var endNodePos = rng.NextLong(availableNodes[i - 2].LongCount());
            var endNode = availableNodes[i - 2].ToArray()[endNodePos];
            var edge = new Edge
            {
                Start = productStructure.NodesPerLevel[i - 1][j - 1],
                End = productStructure.NodesPerLevel[i - 2][endNode]
            };
            edge.End.IncomingEdges.Add(edge);
            productStructure.Edges.Add(edge);
            // Remove the end node so each node receives at most one edge in this phase.
            availableNodes[i - 2].Remove(endNode);
        }
    }
    // Phase 2: every node that did not receive an incoming edge in phase 1 gets
    // one whose start level is sampled by inverse transform from pk.
    for (var i = 1; i < inputParameters.DepthOfAssembly; i++)
    {
        var pk = GetCumulatedProbabilitiesPk2(i, inputParameters.DepthOfAssembly);
        foreach (var j in availableNodes[i - 1])
        {
            var u = rng.NextDouble();
            var sum = 0.0;
            var k = 0;
            // Accumulate pk values until u falls below the running sum;
            // falls through to the last entry if u exceeds all partial sums.
            while (k < pk.Count - 1)
            {
                sum += pk[k].Value;
                if (u < sum)
                {
                    break;
                }
                k++;
            }
            var assemblyLevelOfStartNode = pk[k].Key;
            var posOfNode = rng.NextLong(productStructure.NodesPerLevel[assemblyLevelOfStartNode - 1].LongCount());
            var edge = new Edge
            {
                Start = productStructure.NodesPerLevel[assemblyLevelOfStartNode - 1][posOfNode],
                End = productStructure.NodesPerLevel[i - 1][j]
            };
            edge.End.IncomingEdges.Add(edge);
            productStructure.Edges.Add(edge);
        }
    }
}
// Creates the first (mandatory) set of edges for a converging material flow
// (chosen when ReutilisationRatio < ComplexityRatio) so that every node gets
// connected. Phase 1 works top-down and gives each node a start node one level
// below; phase 2 connects the nodes left over from phase 1 to an end node on a
// higher level sampled from the probabilities in pkPerI.
private void GenerateFirstSetOfEdgesForConvergingMaterialFlow(ProductStructureInput inputParameters, XRandom rng, Dictionary<int, List<KeyValuePair<int, double>>> pkPerI, List<HashSet<long>> availableNodes, ProductStructure productStructure)
{
    // Phase 1: from the end products (level 1) down to the second-deepest level.
    for (var i = 1; i <= inputParameters.DepthOfAssembly - 1; i++)
    {
        for (long j = 1; j <= productStructure.NodesPerLevel[i - 1].LongCount() /*&& availableNodes[i].LongCount() > 0*/; j++)
        {
            // Pick a random still-available start node on the next-deeper level (i+1).
            // NOTE(review): rng.NextLong(0) would be hit if the pool below runs
            // empty — presumably the node counts per level prevent that; verify.
            var startNodePos = rng.NextLong(availableNodes[i].LongCount());
            var startNode = availableNodes[i].ToArray()[startNodePos];
            var edge = new Edge
            {
                Start = productStructure.NodesPerLevel[i][startNode],
                End = productStructure.NodesPerLevel[i - 1][j - 1]
            };
            edge.End.IncomingEdges.Add(edge);
            productStructure.Edges.Add(edge);
            // Remove the start node so each node is used at most once in this phase.
            availableNodes[i].Remove(startNode);
        }
    }
    // Phase 2: every node not used as a start node in phase 1 gets an outgoing
    // edge whose end level is sampled by inverse transform from pkPerI[i].
    for (var i = inputParameters.DepthOfAssembly; i >= 2; i--)
    {
        foreach (var j in availableNodes[i - 1])
        {
            var u = rng.NextDouble();
            var sum = 0.0;
            var k = 0;
            // Accumulate probability values until u falls below the running sum;
            // falls through to the last entry if u exceeds all partial sums.
            while (k < pkPerI[i].Count - 1)
            {
                sum += pkPerI[i][k].Value;
                if (u < sum)
                {
                    break;
                }
                k++;
            }
            var assemblyLevelOfEndNode = pkPerI[i][k].Key;
            var posOfNode = rng.NextLong(productStructure.NodesPerLevel[assemblyLevelOfEndNode - 1].LongCount());
            var edge = new Edge
            {
                Start = productStructure.NodesPerLevel[i - 1][j],
                End = productStructure.NodesPerLevel[assemblyLevelOfEndNode - 1][posOfNode]
            };
            edge.End.IncomingEdges.Add(edge);
            productStructure.Edges.Add(edge);
        }
    }
}
// TODO(review, translated from German): how could one test that this algorithm
// matches the one from SYMTEP (contains no errors)?
public ProductStructure GenerateProductStructure(ProductStructureInput inputParameters, MasterTableArticleType articleTypes, MasterTableUnit units, M_Unit[] unitCol, XRandom rng)
{
    // Build the structure in three passes: nodes per assembly level first,
    // then the connecting edges, and finally the sampled edge weights.
    var structure = new ProductStructure();
    var openNodesPerLevel = new List<HashSet<long>>();

    GenerateParts(inputParameters, structure, openNodesPerLevel, articleTypes, units, unitCol, rng);
    GenerateEdges(inputParameters, structure, rng, openNodesPerLevel);
    DeterminationOfEdgeWeights(inputParameters, structure, rng);

    return structure;
}
// Initializes piB as the second bracket for the bisection towards the requested
// degree of organization: piA and piB must lie on opposite sides of the target.
// (inputProductStructure is kept for interface compatibility; it was only used
// by a since-removed special case.)
private void InitializePiB(TransitionMatrixInput inputParameters, ProductStructureInput inputProductStructure, int matrixSize, int jCorrector)
{
    if (_organizationDegreeA > inputParameters.DegreeOfOrganization)
    {
        // piA is already more organized than requested -> start piB at the
        // uniform matrix, whose degree of organization is 0.
        for (var i = 0; i < matrixSize; i++)
        {
            for (var j = 0; j < matrixSize; j++)
            {
                _piB[i, j] = 1.0 / matrixSize;
            }
        }
        _organizationDegreeB = 0.0;
    }
    else
    {
        // piA is not organized enough -> start piB at a fully deterministic
        // matrix with degree of organization 1.
        //
        // (Translated from German) It was decided to initialize piB as a
        // "shifted" identity matrix so that every machine has a distinct
        // successor. Reason: the former special case for i = M-1, which set a 1
        // at j = trunc(M - M/FT + 1), meant the first machine could (almost)
        // never be reached again at high degrees of organization and
        // additionally created a transition loop between some machines.
        for (var i = 0; i < matrixSize; i++)
        {
            for (var j = 0; j < matrixSize; j++)
            {
                if ((j + jCorrector) % matrixSize == (i + 1) % matrixSize)
                {
                    _piB[i, j] = 1.0;
                }
            }
        }
        _organizationDegreeB = 1.0;
    }
}
// Adds random edges on top of the first (connectivity) set until the total edge
// count reaches edgeCount. Start nodes are drawn uniformly from all
// non-end-product nodes; the end node's level is sampled from pkPerI.
private static void GenerateSecondSetOfEdges(ProductStructureInput inputParameters, ProductStructure productStructure, XRandom rng, long edgeCount, Dictionary<int, List<KeyValuePair<int, double>>> pkPerI)
{
    // Every node except the end products (level 1) can act as a start node.
    var possibleStartNodes = productStructure.NodesCounter - inputParameters.EndProductCount;
    for (var j = productStructure.Edges.LongCount() + 1; j <= edgeCount; j++)
    {
        // Draw a 1-based global position among all eligible start nodes and
        // translate it into (assembly level, position within level) by walking
        // the levels from 2 upwards and subtracting each level's node count.
        var startNodePos = rng.NextLong(possibleStartNodes) + 1;
        var assemblyLevelOfStartNode = 2;
        while (assemblyLevelOfStartNode < inputParameters.DepthOfAssembly)
        {
            if (startNodePos <= productStructure.NodesPerLevel[assemblyLevelOfStartNode - 1].LongCount())
            {
                break;
            }
            startNodePos -= productStructure.NodesPerLevel[assemblyLevelOfStartNode - 1].LongCount();
            assemblyLevelOfStartNode++;
        }
        // Inverse-transform sampling of the end node's assembly level: accumulate
        // probability values until u falls below the running sum; falls through
        // to the last entry if u exceeds all partial sums.
        var u = rng.NextDouble();
        var sum = 0.0;
        var k = 0;
        while (k < pkPerI[assemblyLevelOfStartNode].Count - 1)
        {
            sum += pkPerI[assemblyLevelOfStartNode][k].Value;
            if (u < sum)
            {
                break;
            }
            k++;
        }
        var assemblyLevelOfEndNode = pkPerI[assemblyLevelOfStartNode][k].Key;
        // The end node is drawn uniformly within the sampled level. Parallel
        // (multi-)edges are possible here; per the author's note in the caller,
        // that is acceptable for the product structure.
        var endNodePos = rng.NextLong(productStructure.NodesPerLevel[assemblyLevelOfEndNode - 1].LongCount());
        var edge = new Edge
        {
            Start = productStructure.NodesPerLevel[assemblyLevelOfStartNode - 1][startNodePos - 1],
            End = productStructure.NodesPerLevel[assemblyLevelOfEndNode - 1][endNodePos]
        };
        edge.End.IncomingEdges.Add(edge);
        productStructure.Edges.Add(edge);
    }
}
private void GenerateParts(ProductStructureInput inputParameters, ProductStructure productStructure, List<HashSet<long>> availableNodes, MasterTableArticleType articleTypes, MasterTableUnit units, M_Unit[] unitCol, XRandom rng)
{
    // Work-plan lengths are only sampled when both moments are configured.
    var hasWorkPlanMoments = inputParameters.MeanWorkPlanLength != null && inputParameters.VarianceWorkPlanLength != null;
    TruncatedDiscreteNormal workPlanLengthDistribution = null;
    if (hasWorkPlanMoments)
    {
        // Truncated below at 1 so every sampled work plan has at least one operation.
        workPlanLengthDistribution = new TruncatedDiscreteNormal(
            1,
            null,
            Normal.WithMeanVariance(
                (double)inputParameters.MeanWorkPlanLength,
                (double)inputParameters.VarianceWorkPlanLength,
                rng.GetRng()));
    }

    // Create the nodes level by level and keep a running total of all nodes.
    for (var level = 1; level <= inputParameters.DepthOfAssembly; level++)
    {
        productStructure.NodesCounter += GeneratePartsForEachLevel(
            inputParameters, productStructure, availableNodes, articleTypes,
            units, unitCol, rng, level, hasWorkPlanMoments, workPlanLengthDistribution);
    }
}
private void GenerateEdges(ProductStructureInput inputParameters, ProductStructure productStructure, XRandom rng, List<HashSet<long>> availableNodes)
{
    // Target edge count: whichever of the two configured ratios demands more edges.
    var deepestLevelNodeCount = productStructure.NodesPerLevel[inputParameters.DepthOfAssembly - 1].LongCount();
    var edgesByReutilisation = inputParameters.ReutilisationRatio * (productStructure.NodesCounter - inputParameters.EndProductCount);
    var edgesByComplexity = inputParameters.ComplexityRatio * (productStructure.NodesCounter - deepestLevelNodeCount);
    var edgeCount = Convert.ToInt64(Math.Round(Math.Max(edgesByReutilisation, edgesByComplexity)));

    var pkPerI = GetSetOfCumulatedProbabilitiesPk1(inputParameters.DepthOfAssembly);

    // The first edge set guarantees every node is connected; its direction
    // depends on whether the material flow converges or diverges.
    if (inputParameters.ReutilisationRatio < inputParameters.ComplexityRatio)
    {
        GenerateFirstSetOfEdgesForConvergingMaterialFlow(inputParameters, rng, pkPerI, availableNodes, productStructure);
    }
    else
    {
        GenerateFirstSetOfEdgesForDivergingMaterialFlow(inputParameters, productStructure, rng, availableNodes);
    }

    // (Translated from German) Apparently multi-edges can arise here. Is that
    // allowed in the product structure? -> poses no problem.
    GenerateSecondSetOfEdges(inputParameters, productStructure, rng, edgeCount, pkPerI);
}
private static M_Unit GeneratePartsForCurrentLevel(ProductStructureInput inputParameters, M_Unit[] unitCol, XRandom rng, int i, bool sampleWorkPlanLength, TruncatedDiscreteNormal truncatedDiscreteNormalDistribution, HashSet<long> availableNodesOnThisLevel, long j, M_Unit unit, M_ArticleType articleType, bool toPurchase, bool toBuild, Dictionary<long, Node> nodesCurrentLevel)
{
    // Every freshly created node starts out as available for edge generation.
    availableNodesOnThisLevel.Add(j);

    // Purchased material (deepest assembly level) gets a randomly chosen unit;
    // all other levels keep the unit handed in by the caller.
    if (i == inputParameters.DepthOfAssembly)
    {
        unit = unitCol[rng.Next(unitCol.Length)];
    }

    var article = new M_Article
    {
        Name = "Material " + i + "." + (j + 1),
        ArticleTypeId = articleType.Id,
        CreationDate = DateTime.Now,
        DeliveryPeriod = 5,
        UnitId = unit.Id,
        Price = 10,
        ToPurchase = toPurchase,
        ToBuild = toBuild
    };
    var node = new Node { AssemblyLevel = i, Article = article };
    nodesCurrentLevel[j] = node;

    // Only in-house articles (not purchased material) carry a work-plan length.
    if (sampleWorkPlanLength && i != inputParameters.DepthOfAssembly)
    {
        node.WorkPlanLength = truncatedDiscreteNormalDistribution.Sample();
    }

    // Return the (possibly re-sampled) unit so the caller can reuse it.
    return unit;
}