/// <summary>
/// Partial, semi-ordered tree matching algorithm.
///
/// Iterates over the target string with no backtracking, maintaining data about matches in seriesMatch:
///   indexes which are valid branched matches for the current index in the target,
///   and a record of which leaves in the matcher have been matched.
/// </summary>
/// <param name="indexInSymbolTarget"></param>
/// <param name="seriesMatch"></param>
/// <returns>true when every symbol in seriesMatch can be mapped back into the target string; captured parameters are written into parameterCopyMemory</returns>
public bool MatchesForward(
    NativeMultipleHashSets.HashSetSlice includeSymbolsSet,
    int indexInSymbolTarget,
    SymbolSeriesSuffixMatcher seriesMatch,
    SymbolString<float> symbolString,
    int firstParameterCopyIndex,
    NativeArray<float> parameterCopyMemory,
    out byte paramsCopiedToMem,
    TmpNativeStack<BranchEventData> helperStack)
{
    if (!seriesMatch.HasGraphIndexes)
    {
        // this should be done in the parsing/compiling phase. it only has to happen once per matching rule for the whole system.
        throw new System.Exception("graph indexes should be precomputed");
        //seriesMatch.ComputeGraphIndexes(branchOpenSymbol, branchCloseSymbol);
    }
    // keep count of how many more matches are required at each level of the tree.
    // starts out as a copy of the child count array. each leaf will be at 0, and will go negative when matched.
    //var remainingMatchesAtIndexes = seriesMatch.childrenCounts.Clone() as int[];
    return MatchesForwardsAtIndexOrderingInvariant(
        includeSymbolsSet,
        indexInSymbolTarget,
        seriesMatch,
        symbolString,
        firstParameterCopyIndex,
        parameterCopyMemory,
        out paramsCopiedToMem,
        helperStack);
}
public void Execute(int startIndex, int batchSize)
{
    var forwardsMatchHelperStack = new TmpNativeStack<SymbolStringBranchingCache.BranchEventData>(5);
    var rnd = LSystemStepper.RandomFromIndexAndSeed(((uint)startIndex) + 1, seed);
    for (int i = 0; i < batchSize; i++)
    {
        ExecuteAtIndex(i + startIndex, forwardsMatchHelperStack, ref rnd);
    }
}
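// The per-batch random seeding above is what keeps stochastic rule selection deterministic and thread-safe:
// each batch derives its own Unity.Mathematics.Random from the global seed and the batch's start index, so no
// random state is shared across threads and a given seed reproduces the same result. Below is a minimal
// standalone sketch of that pattern; DeriveRandom is a hypothetical stand-in, not the project's
// LSystemStepper.RandomFromIndexAndSeed.
using Unity.Mathematics;

public struct BatchedRandomSketch
{
    public uint globalSeed;

    // mix the batch's start index into the seed; guard against 0 because Unity.Mathematics.Random rejects a zero state
    private static Random DeriveRandom(uint index, uint seed)
    {
        var state = seed ^ ((index + 1) * 0x9E3779B9u);
        return new Random(state == 0 ? 1u : state);
    }

    public void Execute(int startIndex, int batchSize)
    {
        var rnd = DeriveRandom((uint)startIndex, globalSeed);
        for (int i = 0; i < batchSize; i++)
        {
            // rnd is passed by ref so draws stay sequential within the batch,
            // while other batches use their own independently derived stream
            ProcessIndex(startIndex + i, ref rnd);
        }
    }

    private void ProcessIndex(int index, ref Random rnd)
    {
        var roll = rnd.NextFloat(); // e.g. stochastic rule outcome selection
    }
}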
private void ExecuteAtIndex(
    int indexInSymbols,
    TmpNativeStack<SymbolStringBranchingCache.BranchEventData> helperStack,
    ref Unity.Mathematics.Random random)
{
    var matchSingleton = matchSingletonData[indexInSymbols];
    if (matchSingleton.isTrivial)
    {
        // if the match is trivial, then no parameters are captured. the rest of the algorithm will read directly from the source index
        // and no transformation will take place.
        return;
    }
    var symbol = sourceData.symbols[indexInSymbols];
    if (!blittableRulesByTargetSymbol.TryGetValue(symbol, out var ruleIndexing) || ruleIndexing.length <= 0)
    {
        matchSingleton.errorCode = LSystemMatchErrorCode.TRIVIAL_SYMBOL_NOT_INDICATED_AT_MATCH_TIME;
        matchSingletonData[indexInSymbols] = matchSingleton;
        return;
    }

    var anyRuleMatched = false;
    var currentIndexInParameterMemory = matchSingleton.tmpParameterMemorySpace.index;
    for (byte i = 0; i < ruleIndexing.length; i++)
    {
        var rule = blittableRulesByTargetSymbol[ruleIndexing, i];
        var success = rule.PreMatchCapturedParametersWithoutConditional(
            branchingCache,
            sourceData,
            indexInSymbols,
            tmpParameterMemory,
            currentIndexInParameterMemory,
            ref matchSingleton,
            helperStack,
            globalParams,
            globalOperatorData,
            ref random,
            outcomes);
        if (success)
        {
            anyRuleMatched = true;
            matchSingleton.matchedRuleIndexInPossible = i;
            break;
        }
    }
    if (anyRuleMatched == false)
    {
        matchSingleton.isTrivial = true;
    }
    matchSingletonData[indexInSymbols] = matchSingleton;
}
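// The selection policy above is deliberately simple: rules sharing a target symbol are tried in their stored
// order, the first one whose pre-match (contexts, parameter count, conditional) succeeds is recorded, and a
// symbol with no applicable rule degrades to a trivial copy-through. A stripped-down sketch of that control
// flow, with hypothetical delegate-based rules standing in for the blittable rule structs:
using System.Collections.Generic;

public static class RuleSelectionSketch
{
    // hypothetical stand-in for a pre-match attempt: returns true if the rule applies at this index
    public delegate bool TryPreMatch(int indexInSymbols);

    // returns the index of the first rule that matches, or -1 to signal "trivial: copy the symbol through unchanged"
    public static int SelectRule(IReadOnlyList<TryPreMatch> rulesForSymbol, int indexInSymbols)
    {
        for (int ruleIndex = 0; ruleIndex < rulesForSymbol.Count; ruleIndex++)
        {
            if (rulesForSymbol[ruleIndex](indexInSymbols))
            {
                return ruleIndex; // first successful pre-match wins; later rules are never evaluated
            }
        }
        return -1;
    }
}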
private JobHandle ScheduleAutophagyJob(JobHandle dependency, CustomRuleSymbols customSymbols)
{
    // autophagy is only dependent on the source string. don't need to register as dependent on native data/source symbols
    if (customSymbols.hasAutophagy)
    {
        var helperStack = new TmpNativeStack<AutophagyPostProcess.BranchIdentity>(10, Allocator.TempJob);
        var autophagicJob = new AutophagyPostProcess
        {
            symbols = target,
            lastIdentityStack = helperStack,
            customSymbols = customSymbols
        };
        dependency = autophagicJob.Schedule(dependency);
        dependency = helperStack.Dispose(dependency);
    }
    return dependency;
}
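// Feeding the returned JobHandle into a scheduled Dispose is what lets the TempJob helper stack outlive this
// method without leaking or blocking: the job system releases the container only after the autophagy job has
// run. A minimal self-contained sketch of the same chain, using a plain NativeArray and a hypothetical FillJob:
using Unity.Collections;
using Unity.Jobs;

public struct FillJob : IJob
{
    public NativeArray<int> values;

    public void Execute()
    {
        for (int i = 0; i < values.Length; i++) values[i] = i;
    }
}

public static class ScheduleThenDisposeExample
{
    public static JobHandle ScheduleWithTempData(JobHandle dependency)
    {
        // TempJob allocation lives only as long as the scheduled chain needs it
        var values = new NativeArray<int>(16, Allocator.TempJob);
        dependency = new FillJob { values = values }.Schedule(dependency);
        // Dispose(JobHandle) schedules the release after the job completes, so nothing blocks the main thread
        dependency = values.Dispose(dependency);
        return dependency;
    }
}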
public TurtleStringReadingCompletable(
    Mesh targetMesh,
    int totalSubmeshes,
    DependencyTracker<SymbolString<float>> symbols,
    DependencyTracker<NativeTurtleData> nativeData,
    int branchStartChar,
    int branchEndChar,
    TurtleState defaultState,
    CustomRuleSymbols customSymbols,
    TurtleVolumeWorldReferences volumetrics,
    Matrix4x4 localToWorldTransform)
{
    this.targetMesh = targetMesh;
    this.nativeData = nativeData;

    UnityEngine.Profiling.Profiler.BeginSample("turtling job");

    JobHandleWrapper volumetricJobHandle = currentJobHandle;
    var volumetricHandles = new TurtleVolumetricHandles
    {
        durabilityWriter = volumetrics.durabilityWriter.GetNextNativeWritableHandle(localToWorldTransform, ref volumetricJobHandle),
        universalWriter = volumetrics.universalLayerWriter.GetNextNativeWritableHandle(localToWorldTransform),
        volumetricData = volumetrics.world.NativeVolumeData.openReadData.AsReadOnly()
    };
    currentJobHandle = volumetricJobHandle;

    UnityEngine.Profiling.Profiler.BeginSample("allocating");
    var tmpHelperStack = new TmpNativeStack<TurtleState>(50, Allocator.TempJob);
    organInstances = new NativeList<TurtleOrganInstance>(100, Allocator.TempJob);
    newMeshSizeBySubmesh = new NativeArray<TurtleMeshAllocationCounter>(totalSubmeshes, Allocator.TempJob);
    UnityEngine.Profiling.Profiler.EndSample();

    NativeArray<float> destructionCommandTimestamps;
    if (volumetrics.damageFlags != null)
    {
        destructionCommandTimestamps = volumetrics.damageFlags.GetDestructionCommandTimestampsReadOnly();
    }
    else
    {
        destructionCommandTimestamps = new NativeArray<float>(0, Allocator.TempJob);
    }

    var entitySpawningSystem = World.DefaultGameObjectInjectionWorld.GetOrCreateSystem<BeginSimulationEntityCommandBufferSystem>();
    var entitySpawnBuffer = entitySpawningSystem.CreateCommandBuffer();
    var turtleCompileJob = new TurtleCompilationJob
    {
        symbols = symbols.Data,
        operationsByKey = nativeData.Data.operationsByKey,
        organData = nativeData.Data.allOrganData,

        organInstances = organInstances,
        newMeshSizeBySubmesh = newMeshSizeBySubmesh,
        spawnEntityBuffer = entitySpawnBuffer,

        nativeTurtleStack = tmpHelperStack,

        branchStartChar = branchStartChar,
        branchEndChar = branchEndChar,

        currentState = defaultState,
        customRules = customSymbols,

        volumetricHandles = volumetricHandles,
        hasVolumetricDestruction = volumetrics.damageFlags != null,
        volumetricDestructionTimestamps = destructionCommandTimestamps,
        earliestValidDestructionCommand = volumetrics.damageFlags != null ? Time.time - volumetrics.damageFlags.timeCommandStaysActive : -1
    };

    currentJobHandle = turtleCompileJob.Schedule(currentJobHandle);
    volumetrics.world.NativeVolumeData.RegisterReadingDependency(currentJobHandle);
    entitySpawningSystem.AddJobHandleForProducer(currentJobHandle);
    volumetrics.damageFlags?.RegisterReaderOfDestructionFlags(currentJobHandle);
    volumetrics.durabilityWriter.RegisterWriteDependency(currentJobHandle);
    volumetrics.universalLayerWriter.RegisterWriteDependency(currentJobHandle);
    nativeData.RegisterDependencyOnData(currentJobHandle);
    symbols.RegisterDependencyOnData(currentJobHandle);

    currentJobHandle = tmpHelperStack.Dispose(currentJobHandle);
    if (volumetrics.damageFlags == null)
    {
        currentJobHandle = destructionCommandTimestamps.Dispose(currentJobHandle);
    }

    UnityEngine.Profiling.Profiler.EndSample();
}
private void ExtractEdgesAndNodes()
{
    var branchSymbolParentStack = new TmpNativeStack<BranchEvent>(5);
    var currentNodeParent = -1;
    for (int symbolIndex = 0; symbolIndex < inPlaceSymbols.Length; symbolIndex++)
    {
        var symbol = inPlaceSymbols[symbolIndex];
        if (symbol == customSymbols.diffusionNode)
        {
            if (currentNodeParent >= 0)
            {
                var newEdge = new DiffusionEdge
                {
                    nodeAIndex = currentNodeParent,
                    nodeBIndex = working.nodes.Length
                };
                working.allEdges.Add(newEdge);
            }
            currentNodeParent = working.nodes.Length;

            var nodeParams = inPlaceSymbols.parameters[symbolIndex];
            var newNode = new DiffusionNode
            {
                indexInTarget = symbolIndex,
                targetParameters = nodeParams,
                indexInTempAmountList = working.nodeAmountsListA.Length,
                totalResourceTypes = (nodeParams.length - 1) / 2,
                diffusionConstant = inPlaceSymbols.parameters[nodeParams, 0],
            };
            newNode.targetParameters.length = nodeParams.length;
            working.nodes.Add(newNode);

            for (int resourceType = 0; resourceType < newNode.totalResourceTypes; resourceType++)
            {
                var currentAmount = inPlaceSymbols.parameters[nodeParams, resourceType * 2 + 1];
                var maxCapacity = inPlaceSymbols.parameters[nodeParams, resourceType * 2 + 2];
                working.nodeAmountsListA.Add(currentAmount);
                working.nodeAmountsListB.Add(0);
                working.nodeMaxCapacities.Add(maxCapacity);
            }
        }
        else if (symbol == customSymbols.diffusionAmount)
        {
            var amountParameters = inPlaceSymbols.parameters[symbolIndex];
            inPlaceSymbols.parameters[symbolIndex] = new JaggedIndexing
            {
                index = amountParameters.index,
                length = 0
            };
            if (currentNodeParent < 0)
            {
                // problem: the amount will disappear
                continue;
            }
            var modifiedNode = working.nodes[currentNodeParent];
            for (int resourceType = 0; resourceType < modifiedNode.totalResourceTypes && resourceType < amountParameters.length; resourceType++)
            {
                working.nodeAmountsListA[modifiedNode.indexInTempAmountList + resourceType] += inPlaceSymbols.parameters[amountParameters, resourceType];
            }
        }
        else if (symbol == customSymbols.branchOpenSymbol)
        {
            branchSymbolParentStack.Push(new BranchEvent
            {
                openBranchSymbolIndex = symbolIndex,
                currentNodeParent = currentNodeParent
            });
        }
        else if (symbol == customSymbols.branchCloseSymbol)
        {
            if (branchSymbolParentStack.Count <= 0)
            {
                // uh oh. unclear how this happens, but it does. probably related to the volumetric destruction and autophagy.
                break;
            }
            var lastBranchState = branchSymbolParentStack.Pop();
            currentNodeParent = lastBranchState.currentNodeParent;
        }
    }
}
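// Both diffusion extraction passes rely on the same invariant: a branch-open symbol pushes the current parent
// node and a branch-close symbol restores it, so edges always connect a node to its nearest ancestor on the
// axis, however deeply branches nest. A standalone illustration of that parent tracking, using plain C#
// collections rather than TmpNativeStack and single-character symbols with no parameters:
using System.Collections.Generic;

public static class BranchParentExample
{
    // returns, for every symbol index, the index of the node that was the current parent when it was reached
    public static int[] ParentPerSymbol(string symbols, char node, char open, char close)
    {
        var parents = new int[symbols.Length];
        var parentStack = new Stack<int>();
        int currentParent = -1;
        for (int i = 0; i < symbols.Length; i++)
        {
            parents[i] = currentParent;
            if (symbols[i] == node) currentParent = i;                    // a new node becomes the parent of what follows
            else if (symbols[i] == open) parentStack.Push(currentParent); // remember the parent outside the branch
            else if (symbols[i] == close) currentParent = parentStack.Pop(); // restore it when the branch closes
        }
        return parents;
    }
}

// ParentPerSymbol("N[N]N", 'N', '[', ']') yields {-1, 0, 0, 2, 0}:
// the node inside the brackets (index 2) and the trailing node (index 4) both attach to the node at index 0.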
public bool PreMatchCapturedParametersWithoutConditional(
    SymbolStringBranchingCache branchingCache,
    SymbolString<float> source,
    int indexInSymbols,
    NativeArray<float> parameterMemory,
    int startIndexInParameterMemory,
    ref LSystemSingleSymbolMatchData matchSingletonData,
    TmpNativeStack<SymbolStringBranchingCache.BranchEventData> helperStack,
    NativeArray<float> globalParams,
    NativeArray<OperatorDefinition> globalOperatorData,
    ref Unity.Mathematics.Random random,
    NativeArray<RuleOutcome.Blittable> outcomes)
{
    var target = targetSymbolWithParameters;

    // parameters
    byte matchedParameterNum = 0;

    // context match
    if (contextPrefix.IsValid && contextPrefix.graphNodeMemSpace.length > 0)
    {
        var backwardsMatchMatches = branchingCache.MatchesBackwards(
            branchingCache.includeSymbols[ruleGroupIndex],
            indexInSymbols,
            contextPrefix,
            source,
            startIndexInParameterMemory + matchedParameterNum,
            parameterMemory,
            out var copiedParameters);
        if (!backwardsMatchMatches)
        {
            return false;
        }
        matchedParameterNum += copiedParameters;
    }

    var coreParametersIndexing = source.parameters[indexInSymbols];
    if (coreParametersIndexing.length != target.parameterLength)
    {
        return false;
    }
    if (coreParametersIndexing.length > 0)
    {
        for (int i = 0; i < coreParametersIndexing.length; i++)
        {
            var paramValue = source.parameters[coreParametersIndexing, i];
            parameterMemory[startIndexInParameterMemory + matchedParameterNum] = paramValue;
            matchedParameterNum++;
        }
    }

    if (contextSuffix.IsCreated && contextSuffix.graphNodeMemSpace.length > 0)
    {
        var forwardMatch = branchingCache.MatchesForward(
            branchingCache.includeSymbols[ruleGroupIndex],
            indexInSymbols,
            contextSuffix,
            source,
            startIndexInParameterMemory + matchedParameterNum,
            parameterMemory,
            out var copiedParameters,
            helperStack);
        if (!forwardMatch)
        {
            return false;
        }
        matchedParameterNum += copiedParameters;
    }

    matchSingletonData.tmpParameterMemorySpace = new JaggedIndexing
    {
        index = startIndexInParameterMemory,
        length = matchedParameterNum
    };

    if (conditional.IsValid)
    {
        var conditionalMatch = conditional.EvaluateExpression(
            globalParams,
            new JaggedIndexing
            {
                index = 0,
                length = (ushort)globalParams.Length
            },
            parameterMemory,
            matchSingletonData.tmpParameterMemorySpace,
            globalOperatorData) > 0;
        if (!conditionalMatch)
        {
            return false;
        }
    }

    matchSingletonData.selectedReplacementPattern = SelectOutcomeIndex(ref random, outcomes, possibleOutcomeIndexing);
    var outcomeObject = outcomes[matchSingletonData.selectedReplacementPattern + possibleOutcomeIndexing.index];
    matchSingletonData.replacementSymbolIndexing = JaggedIndexing.GetWithOnlyLength(outcomeObject.replacementSymbolSize);
    matchSingletonData.replacementParameterIndexing = JaggedIndexing.GetWithOnlyLength(outcomeObject.replacementParameterCount);

    return true;
}
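// The order in which matchedParameterNum advances defines the memory layout that the conditional and the
// replacement expressions later read: left-context captures first, then the matched symbol's own parameters,
// then right-context captures, packed contiguously from startIndexInParameterMemory. The sketch below is
// illustrative only (not the project's API) and walks that layout for a hypothetical rule B(x) < A(y, z) > C(w).
public static class CaptureOrderSketch
{
    public static int FillExample(float[] parameterMemory, int startIndexInParameterMemory,
                                  float x, float y, float z, float w)
    {
        int matched = 0;
        parameterMemory[startIndexInParameterMemory + matched++] = x; // prefix (left) context, via MatchesBackwards
        parameterMemory[startIndexInParameterMemory + matched++] = y; // the matched symbol's own parameters
        parameterMemory[startIndexInParameterMemory + matched++] = z;
        parameterMemory[startIndexInParameterMemory + matched++] = w; // suffix (right) context, via MatchesForward
        // the conditional is then evaluated against the slice { index = startIndexInParameterMemory, length = matched }
        return matched;
    }
}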
private void ExtractEdgesAndNodes()
{
    var branchSymbolParentStack = new TmpNativeStack<BranchEvent>(5);
    var currentNodeParent = -1;
    for (int symbolIndex = 0; symbolIndex < sourceData.Length; symbolIndex++)
    {
        var symbol = sourceData[symbolIndex];
        if (symbol == customSymbols.diffusionNode)
        {
            if (currentNodeParent >= 0)
            {
                var newEdge = new DiffusionEdge
                {
                    nodeAIndex = currentNodeParent,
                    nodeBIndex = working.nodes.Length
                };
                working.allEdges.Add(newEdge);
            }
            currentNodeParent = working.nodes.Length;

            var nodeParams = sourceData.parameters[symbolIndex];
            var nodeSingleton = matchSingletonData[symbolIndex];
            var newNode = new DiffusionNode
            {
                indexInTarget = nodeSingleton.replacementSymbolIndexing.index,
                targetParameters = nodeSingleton.replacementParameterIndexing,
                indexInTempAmountList = working.nodeAmountsListA.Length,
                totalResourceTypes = (nodeParams.length - 1) / 2,
                diffusionConstant = sourceData.parameters[nodeParams, 0],
            };
            newNode.targetParameters.length = nodeParams.length;
            working.nodes.Add(newNode);

            for (int resourceType = 0; resourceType < newNode.totalResourceTypes; resourceType++)
            {
                var currentAmount = sourceData.parameters[nodeParams, resourceType * 2 + 1];
                var maxCapacity = sourceData.parameters[nodeParams, resourceType * 2 + 2];
                working.nodeAmountsListA.Add(currentAmount);
                working.nodeAmountsListB.Add(0);
                working.nodeMaxCapacities.Add(maxCapacity);
            }
        }
        else if (symbol == customSymbols.diffusionAmount)
        {
            var amountParameters = sourceData.parameters[symbolIndex];
            if (amountParameters.length == 0)
            {
                // the amount has no parameters left. removal will be happening via regular update
                continue;
            }
            // clear out the parameters in the target string, and write the symbol over
            var nodeSingleton = matchSingletonData[symbolIndex];
            targetData.parameters[nodeSingleton.replacementSymbolIndexing.index] = new JaggedIndexing
            {
                index = nodeSingleton.replacementParameterIndexing.index,
                length = 0
            };
            targetData[nodeSingleton.replacementSymbolIndexing.index] = customSymbols.diffusionAmount;
            if (currentNodeParent < 0)
            {
                // problem: the amount will disappear
                continue;
            }
            var modifiedNode = working.nodes[currentNodeParent];
            for (int resourceType = 0; resourceType < modifiedNode.totalResourceTypes && resourceType < amountParameters.length; resourceType++)
            {
                working.nodeAmountsListA[modifiedNode.indexInTempAmountList + resourceType] += sourceData.parameters[amountParameters, resourceType];
            }
        }
        else if (symbol == customSymbols.branchOpenSymbol)
        {
            branchSymbolParentStack.Push(new BranchEvent
            {
                openBranchSymbolIndex = symbolIndex,
                currentNodeParent = currentNodeParent
            });
        }
        else if (symbol == customSymbols.branchCloseSymbol)
        {
            var lastBranchState = branchSymbolParentStack.Pop();
            currentNodeParent = lastBranchState.currentNodeParent;
        }
    }
}
/// <summary>
/// check for a match, enforcing the same ordering in the target match as defined in the matching pattern.
/// </summary>
/// <param name="originIndexInTarget"></param>
/// <param name="seriesMatch"></param>
/// <param name="consumedTargetIndexes"></param>
/// <returns></returns>
private bool MatchesForwardsAtIndexOrderingInvariant(
    NativeMultipleHashSets.HashSetSlice includeSymbolSet,
    int originIndexInTarget,
    SymbolSeriesSuffixMatcher seriesMatch,
    SymbolString<float> symbolString,
    int firstParameterCopyIndex,
    NativeArray<float> parameterCopyMemory,
    out byte paramsCopiedToMem,
    TmpNativeStack<BranchEventData> helperStack)
{
    helperStack.Reset();
    var targetParentIndexStack = helperStack;
    int currentParentIndexInTarget = originIndexInTarget;
    var targetIndexesToMatchIndexes = new NativeHashMap<int, int>(seriesMatch.graphNodeMemSpace.length, Allocator.Temp);
    paramsCopiedToMem = 0;

    var indexInMatchDFSState = seriesMatch.GetImmutableDepthFirstIterationState(nativeRuleData);
    targetIndexesToMatchIndexes.Add(originIndexInTarget, indexInMatchDFSState.currentIndex);
    if (!indexInMatchDFSState.Next(out indexInMatchDFSState))
    {
        // if the match is empty, it automatically matches.
        return true;
    }

    for (int indexInTarget = originIndexInTarget + 1; indexInTarget < symbolString.Length; indexInTarget++)
    {
        var targetSymbol = symbolString[indexInTarget];
        if (!includeSymbolSet.Contains(targetSymbol))
        {
            continue;
        }
        if (targetSymbol == branchOpenSymbol)
        {
            targetParentIndexStack.Push(new BranchEventData
            {
                currentParentIndex = currentParentIndexInTarget,
                openBranchSymbolIndex = indexInTarget,
                paramsCopiedAtThisPoint = paramsCopiedToMem
            });
        }
        else if (targetSymbol == branchCloseSymbol)
        {
            // a close symbol is encountered in one of two cases:
            //  1. the branch in the target has exactly matched the branch in the matcher, and we should just step down
            //  2. the branch in the target has terminated early, meaning we must step down the branch chain and also
            //     reverse the matcher DFS back to a common ancestor
            if (targetParentIndexStack.Count <= 0)
            {
                // if we encounter the end of the branch which contains the origin index before a full match, fail.
                return false;
            }
            var lastBranch = targetParentIndexStack.Pop();
            currentParentIndexInTarget = lastBranch.currentParentIndex;

            var parentInMatch = targetIndexesToMatchIndexes[currentParentIndexInTarget];
            var parentOfSearchState = indexInMatchDFSState.GetParentIndex();
            if (parentInMatch != parentOfSearchState)
            {
                // if the parents don't match, the algorithm will be stepping backwards to the last branch symbol,
                // so roll the copied-parameter count back to where it was when that branch opened.
                paramsCopiedToMem = lastBranch.paramsCopiedAtThisPoint;
            }
        }
        else
        {
            // reverse the DFS in the matcher, back to the last point which shares a parent with the current parent.
            // this acts to ensure the entry to the match has a shared parent, if at all possible.
            // the reversal is necessary when a branching structure failed to match in the last step.
            var parentInMatch = targetIndexesToMatchIndexes[currentParentIndexInTarget];
            if (indexInMatchDFSState.FindPreviousWithParent(out var reversedMatchIndex, parentInMatch))
            {
                indexInMatchDFSState = reversedMatchIndex;
            }
            var indexInMatch = indexInMatchDFSState.currentIndex;
            var currentTargetMatchesMatcher = TargetSymbolMatchesAndParentMatches(
                seriesMatch,
                targetIndexesToMatchIndexes,
                currentParentIndexInTarget,
                indexInTarget,
                indexInMatch,
                symbolString);
            if (currentTargetMatchesMatcher)
            {
                targetIndexesToMatchIndexes.Add(indexInTarget, indexInMatch);
                var paramsToCopy = symbolString.parameters[indexInTarget];
                for (int paramIndex = 0; paramIndex < paramsToCopy.length; paramIndex++)
                {
                    parameterCopyMemory[firstParameterCopyIndex + paramsCopiedToMem] = symbolString.parameters[paramsToCopy, paramIndex];
                    paramsCopiedToMem++;
                }
                currentParentIndexInTarget = indexInTarget; // series continuation includes implicit parenting
                if (!indexInMatchDFSState.Next(out indexInMatchDFSState))
                {
                    // the whole matcher has been consumed: the match is complete.
                    return true;
                }
            }
            else
            {
                // the symbol in the target isn't a valid match, so no further symbols in the current target branching structure can match.
                // rewind back to the previous branching symbol and skip this whole structure,
                // or fail if we're not inside a nested structure.
                if (targetParentIndexStack.Count <= 0)
                {
                    return false;
                }
                var lastBranch = targetParentIndexStack.Pop();
                currentParentIndexInTarget = lastBranch.currentParentIndex;
                indexInTarget = FindClosingBranchIndexReadonly(lastBranch.openBranchSymbolIndex);

                var parentInMatchAfterRewind = targetIndexesToMatchIndexes[currentParentIndexInTarget];
                var parentOfSearchState = indexInMatchDFSState.GetParentIndex();
                if (parentInMatchAfterRewind != parentOfSearchState)
                {
                    // if the parents don't match, the algorithm will be stepping backwards to the last branch symbol on the next update,
                    // so roll the copied-parameter count back to where it was when that branch opened.
                    paramsCopiedToMem = lastBranch.paramsCopiedAtThisPoint;
                }
            }
        }
    }
    return false;
}
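// To make the skip/rewind behaviour above concrete, here is a much simpler recursive formulation of the same
// matching question: single-character symbols, no parameters, no ignore set, and a flat (non-branching) pattern.
// Unlike the stack-based implementation above it uses backtracking, so it is only an illustration of which
// target strings a right context matches, not of the project's algorithm.
public static class ForwardContextSketch
{
    // Does `pattern` appear after `origin` in `target`, where each successive pattern symbol must be a
    // descendant of the previously matched one? Branches in the target that do not contain the next
    // pattern symbol are skipped over, mirroring the branch-skip step above.
    public static bool MatchesForward(string target, int origin, string pattern)
    {
        return MatchFrom(target, origin + 1, pattern, 0);
    }

    private static bool MatchFrom(string target, int start, string pattern, int patternIndex)
    {
        if (patternIndex >= pattern.Length) return true;
        for (int i = start; i < target.Length; i++)
        {
            char c = target[i];
            if (c == ']') return false;                           // subtree ended before the pattern completed
            if (c == '[')
            {
                // either the rest of the pattern matches inside this branch...
                if (MatchFrom(target, i + 1, pattern, patternIndex)) return true;
                i = FindClosingBracket(target, i);                // ...or skip the whole branch and continue after it
            }
            else if (c == pattern[patternIndex])
            {
                return MatchFrom(target, i + 1, pattern, patternIndex + 1);
            }
            else
            {
                return false;                                     // a different symbol on the main axis breaks the chain
            }
        }
        return false;
    }

    private static int FindClosingBracket(string target, int openIndex)
    {
        var depth = 0;
        for (int i = openIndex; i < target.Length; i++)
        {
            if (target[i] == '[') depth++;
            else if (target[i] == ']' && --depth == 0) return i;
        }
        return target.Length;
    }
}

// MatchesForward("A[B]C", 0, "C")  -> true   (the [B] branch is skipped)
// MatchesForward("A[BC]", 0, "C")  -> false  (C only appears behind a branch axis that starts with B)
// MatchesForward("A[BC]", 0, "BC") -> true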