// Searches the registered separator tokens for one that equals aToken and
// whose match criteria accept the specified nesting level. Returns true and
// sets aCriteria on success; otherwise aCriteria is left null.
protected bool IsArgumentSeparatorMatch(SymToken aToken, out SymTokenBalancerMatchCriteria aCriteria, int aLevelNumber)
{
    aCriteria = null;
    //
    // Scan every occurrence of the token within the separator list until a
    // criteria object accepts the supplied level number.
    for (int pos = iArgumentSeparators.IndexOf(aToken); pos >= 0; pos = iArgumentSeparators.IndexOf(aToken, pos + 1))
    {
        SymToken candidate = iArgumentSeparators[pos];
        System.Diagnostics.Debug.Assert(candidate.Tag != null && candidate.Tag is SymTokenBalancerMatchCriteria);
        SymTokenBalancerMatchCriteria candidateCriteria = (SymTokenBalancerMatchCriteria) candidate.Tag;
        if (candidateCriteria.Matches(aLevelNumber))
        {
            aCriteria = candidateCriteria;
            return true;
        }
    }
    //
    return false;
}
// Registers the bracket and argument-separator tokens needed to parse
// function-style argument lists. Square and template brackets are
// level-tracked so balancing is correct; the comma separator is registered
// twice with different criteria for level-1 versus deeper nesting.
public virtual void RegisterFunctionParserTokens()
{
    // Base class registration
    RegisterBalancerTokens();

    // NOTE(review): the original code also constructed "(" and ")" tokens
    // here but never registered or otherwise used them; those unused locals
    // have been removed. Round-bracket handling is presumably performed by
    // RegisterBalancerTokens() - TODO confirm.
    SymToken squareBracketTokenOpening = new SymToken("[", SymToken.TClass.EClassSymbol, SymToken.TType.ETypeUnidentified);
    SymToken squareBracketTokenClosing = new SymToken("]", SymToken.TClass.EClassSymbol, SymToken.TType.ETypeUnidentified);
    SymToken templateBracketTokenOpening = new SymToken("<", SymToken.TClass.EClassSymbol, SymToken.TType.ETypeUnidentified);
    SymToken templateBracketTokenClosing = new SymToken(">", SymToken.TClass.EClassSymbol, SymToken.TType.ETypeUnidentified);

    // We want to track levels for square brackets and template arguments in
    // order to ensure we balance correctly. We don't want to remove any
    // redundancy here as these may have special meaning.
    RegisterOpeningToken(squareBracketTokenOpening, squareBracketTokenClosing, true, true, TLevelExpectations.ELevelExpectationsAboveLevelNumber, 0, TAssociatedBehaviour.EBehaviourNone);
    RegisterClosingToken(squareBracketTokenClosing, squareBracketTokenOpening, true, true, TLevelExpectations.ELevelExpectationsAboveLevelNumber, 0, TAssociatedBehaviour.EBehaviourNone);
    RegisterOpeningToken(templateBracketTokenOpening, templateBracketTokenClosing, true, true, TLevelExpectations.ELevelExpectationsAboveLevelNumber, 0, TAssociatedBehaviour.EBehaviourNone);
    RegisterClosingToken(templateBracketTokenClosing, templateBracketTokenOpening, true, true, TLevelExpectations.ELevelExpectationsAboveLevelNumber, 0, TAssociatedBehaviour.EBehaviourNone);

    // Define our argument separation token(s). At exactly level 1 a comma
    // creates a sub-tree (one completed argument); at deeper levels it is
    // kept as plain content with no associated behaviour.
    TAssociatedBehaviour flags = TAssociatedBehaviour.EBehaviourNone;
    flags |= TAssociatedBehaviour.EBehaviourCreateSubTree;
    flags |= TAssociatedBehaviour.EBehaviourRemoveReduntantBracketing;
    //
    SymToken commaDelimiterToken = new SymToken(",", SymToken.TClass.EClassSymbol, SymToken.TType.ETypeUnidentified);
    RegisterArgumentSeparatorToken(commaDelimiterToken, new SymTokenBalancerMatchCriteria(SymToken.NullToken(), false, true, TLevelExpectations.ELevelExpectationsAtLevel, 1, flags));
    RegisterArgumentSeparatorToken(commaDelimiterToken, new SymTokenBalancerMatchCriteria(SymToken.NullToken(), true, true, TLevelExpectations.ELevelExpectationsAboveLevelNumber, 1, TAssociatedBehaviour.EBehaviourNone));
}
// Called when an argument separator has been matched. Emits the separator
// into the document tree, always inserts an argument marker node, and -
// when the criteria request a sub-tree - builds the completed argument and
// notifies observers.
protected virtual void ArgumentStarted(SymToken aToken, SymTokenBalancerMatchCriteria aCriteria)
{
    System.Diagnostics.Debug.Write(aToken.Value);

    // Perform any base class end level behaviour.
    // NOTE(review): removed an unused local that captured CurrentLevelNumber
    // but was never read. Also note this call passes CurrentNode while the
    // additions below use DocumentTree.CurrentNode - presumably the same
    // node; TODO confirm.
    PerformEndLevelBehaviour(CurrentNode, aCriteria);

    // Add the emit node (the rest of the code will work out whether it needs
    // to quote it when the final tree is formed).
    SymTokenBalancerNodeEmittedElement argEmitElement = new SymTokenBalancerNodeEmittedElement(aToken, aCriteria);
    DocumentTree.CurrentNode.Add(argEmitElement);

    // Always add the argument node
    SymTokenBalancerMarkerArgumentNode argNode = new SymTokenBalancerMarkerArgumentNode(aCriteria);
    DocumentTree.CurrentNode.Add(argNode);

    if (aCriteria.IsAssociatedBehaviourCreateSubTree)
    {
        // Make a new argument definition based upon the tokens we have in
        // the main document tree.
        SymArgument argument = MakeArgument(DocumentTree.CurrentNode);

        // Then notify the observer
        NotifyArgumentAvailable(argument, aToken);
    }
}
// State handler for the argument list of a function-like #define.
// Whitespace at bracket level zero terminates the list; new lines and
// continuations are left for the base class; everything else is fed to the
// nested function parser.
private SymParserWorker.TTokenConsumptionType OnStateDefineArguments(SymToken aToken)
{
    // Whitespace at the outermost bracket level means the argument list is
    // complete - flush it and move on to the middle-whitespace state.
    if (aToken.Class == SymToken.TClass.EClassWhiteSpace && iFunctionParser.CurrentLevelNumber == 0)
    {
        MakeDefineArgument();
        State = TState.EStateMiddleWhiteSpace;
        return SymParserWorker.TTokenConsumptionType.ETokenNotConsumed;
    }

    // New lines are handled by the base class (so we must not consume them)
    // and continuations are simply ignored.
    if (aToken.Class == SymToken.TClass.EClassNewLine || aToken.Class == SymToken.TClass.EClassContinuation)
    {
        return SymParserWorker.TTokenConsumptionType.ETokenNotConsumed;
    }

    // Otherwise keep feeding tokens to the function parser until whitespace.
    bool consumedByFunctionParser = iFunctionParser.OfferToken(aToken);
    if (consumedByFunctionParser)
    {
        return SymParserWorker.TTokenConsumptionType.ETokenConsumed;
    }
    return SymParserWorker.TTokenConsumptionType.ETokenNotConsumed;
}
// Creates a worker for a bld.inf (build file) preprocessor section token.
// NOTE(review): every case currently falls through to 'break' without
// assigning 'worker', so this method always returns null; the recognised
// section names are effectively placeholders. The 'context' local is also
// constructed but never used. Presumably handlers are to be added later -
// TODO confirm intent before "fixing".
private SymParserWorker CreateWorkerByTokenType(SymToken aToken)
{
    // Find a worker to handle the token type
    SymParserWorkerContext context = new SymParserWorkerContext(WorkerContext.Document.Context, this, aToken);
    //
    SymParserWorker worker = null;
    // Section names are matched case-insensitively.
    switch (aToken.Value.ToLower())
    {
        // Simple preprocessor operations
        case "prj_platforms":
            break;
        case "prj_exports":
            break;
        case "prj_testexports":
            break;
        case "prj_mmpfiles":
            break;
        case "prj_testmmpfiles":
            break;
        // Skip unhandled preprocessor directives
        default:
            break;
    }
    //
    return(worker);
}
// Consumes every offered token until one of the terminating class type is
// seen; at that point the worker detaches itself from its parent and
// restores the document's current node. All tokens (including the
// terminator) are recorded in iConsumedTokens and reported as consumed.
public override SymParserWorker.TTokenConsumptionType OfferToken(SymToken aToken)
{
    iConsumedTokens.Append(aToken);
    if (aToken.Class == iTerminatingClassType)
    {
        // Work out which node will become the parent (if we are configured in that way)
        SymNode newParent = CalculateNewParentNode();

        // Call back to parent class
        HandleTerminatingConditionMatch(aToken);

        // Reached the new line token. Stop receiving the tokens. We're done.
        WorkerContext.Parent.RemoveChild(this);

        // If the dying action was to make the relative parent node
        // the current one, then we must call CalculateNewParentNode again
        // after the HandleTerminatingConditionMatch callback - since
        // it may have changed the tree.
        if (iDyingAction == TDyingAction.EWhenDyingMakeRelativeParentNodeCurrent)
        {
            newParent = CalculateNewParentNode();
        }

        // Update the document with the new parent node
        WorkerContext.Document.CurrentNode = newParent;
    }
    //
    // Tokens are unconditionally reported as consumed, even before the
    // terminating condition is reached.
    return(TTokenConsumptionType.ETokenConsumed);
}
// Terminating token for a #define has been reached. Flushes whichever
// portion of the define was still being accumulated, then publishes the
// define (as a tree node and a directory entry) if it is valid.
protected override void HandleTerminatingConditionMatch(SymToken aToken)
{
    // Only the name and value states have pending data to flush; the
    // whitespace and argument states require no action here.
    if (State == TState.EStateDefineName)
    {
        MakeDefineName();
    }
    else if (State == TState.EStateDefineValue)
    {
        MakeDefineValue();
    }

    // Do we have a valid define?
    if (iDefine.IsValid)
    {
        SymNodePreProcessorDefine defineNode = new SymNodePreProcessorDefine();
        defineNode.DefineDefinition = iDefine;
        //
        WorkerContext.CurrentNode.Add(defineNode);
        WorkerContext.DefineDirectory.Add(iDefine);
    }
}
// Terminating token for an #include has been reached. Resolves the include
// to a full path and, if that file exists, recursively parses it in-line
// using a carbon copy of the current parser inside a pushed document
// context, which is popped again once parsing completes.
protected override void HandleTerminatingConditionMatch(SymToken aToken)
{
    // Update the include so that it contains a fully resolved path
    iIncludeNode.IncludeDefinition.AdjustRelativeInclude(WorkerContext.Parser.FileName);

    // We've now a fully resolved file name which we can parse, should
    // we desire...
    string includeFile = iIncludeNode.IncludeDefinition.Location;
    if (Utils.SymFileSystemUtils.FileExists(includeFile))
    {
        // Make a new document context
        SymParserDocumentContext subDocumentContext = new SymParserDocumentContext(includeFile, WorkerContext);

        // Use the existing document, but with a new context
        WorkerContext.Document.PushContext(subDocumentContext);

        // Make a new carbon copy of this parser (whatever concrete type it may be)
        SymParserBase subParser = WorkerContext.Parser.CarbonCopy(new object[] { WorkerContext.Document });

        // Make the waiting object - presumably it must be constructed before
        // Parse() runs so completion is not missed; TODO confirm.
        SymParserWaiter waiter = new SymParserWaiter(subParser);

        // Now parse the file and wait for the result
        subParser.Parse();
        waiter.Wait();

        // Restore the original context
        WorkerContext.Document.PopContext();
    }
    // NOTE(review): if the resolved file does not exist, the include is
    // silently skipped - presumably intentional best-effort behaviour.
}
// Raises the argument-available notification to any subscribed handler.
// Uses the null-conditional invocation so the handler field is read only
// once: the original check-then-invoke pattern can throw
// NullReferenceException if the last subscriber detaches between the null
// test and the call.
protected virtual void NotifyArgumentAvailable(SymArgument aArgument, SymToken aDelimitingToken)
{
    EventArgumentAvailableHandler?.Invoke(aArgument, aDelimitingToken);
}
// Callback run on the lexer thread for each lexing event. New tokens are
// queued for the grouping thread; completion sets iLexerFinished. In both
// cases the grouper is woken if it is blocked on the semaphore.
// NOTE(review): 'lock (this)' is discouraged, but DoGrouping() locks the
// same object, so changing it only here would break their mutual exclusion;
// left as-is.
// NOTE(review): the iSemaphore.Count test happens outside the lock -
// presumably a benign race (a missed signal is recovered on the next
// token/event) - TODO confirm.
private void LexerTokenHandler(SymLexer aLexer, SymLexer.TEvent aEvent, SymToken aToken)
{
    if (aEvent == SymLexer.TEvent.EEventLexingToken)
    {
        // Store the token
        lock (this)
        {
            iMastermind.EnqueueLexedToken(aToken);
        }
        // and signal the worker thread if it is waiting...
        if (iSemaphore.Count == 0)
        {
            iSemaphore.Signal();
        }
    }
    else if (aEvent == SymLexer.TEvent.EEventLexingComplete)
    {
        lock (this)
        {
            iLexerFinished = true;
        }
        // and signal the worker thread if it is waiting...
        if (iSemaphore.Count == 0)
        {
            iSemaphore.Signal();
        }
    }
}
// Appends a freshly lexed token to the shared input queue. The queue is
// consumed by the grouping thread, so access is serialised on the queue
// object itself.
public void EnqueueLexedToken(SymToken aToken)
{
    lock (iLexedTokens)
    {
        iLexedTokens.Enqueue(aToken);
    }
}
// Thread body for the grouping stage. Repeatedly asks the mastermind to
// group whatever tokens the lexer has queued so far, sleeping on the
// semaphore between batches, until the lexer reports completion.
// NOTE(review): 'lock (this)' pairs with LexerTokenHandler(); see the note
// there for why the lock target is not changed.
private void DoGrouping()
{
    ReportEvent(TEvent.EEventGroupingStarted, SymToken.NullToken());
    bool lexerFinished = false;
    do
    {
        // Count how many tokens we have...
        lock (this)
        {
            iMastermind.PerformGrouping();
            lexerFinished = iLexerFinished;
        }

        // Wait until there are more items to process
        if (lexerFinished == false)
        {
            ReportEvent(TEvent.EEventGroupingPaused, SymToken.NullToken());
            iSemaphore.Wait();
            ReportEvent(TEvent.EEventGroupingStarted, SymToken.NullToken());
        }
    }
    while (lexerFinished == false);
    ReportEvent(TEvent.EEventGroupingComplete, SymToken.NullToken());
}
// Forwards a lexer/grouper event to any subscribed observers. Uses the
// null-conditional invocation so the delegate field is read only once:
// the original check-then-invoke pattern can throw NullReferenceException
// if the last observer unsubscribes between the null test and the call.
private void ReportEvent(TEvent aEvent, SymToken aToken)
{
    LexerObservers?.Invoke(this, aEvent, aToken);
}
// Offers the token to the base class first; when declined (and the token is
// not a comment), attempts to spawn a child worker for it.
public override SymParserWorker.TTokenConsumptionType OfferToken(SymToken aToken)
{
    // Give the base class first refusal.
    TTokenConsumptionType ret = base.OfferToken(aToken);
    if (ret != TTokenConsumptionType.ETokenNotConsumed)
    {
        return ret;
    }

    // Comments never spawn workers.
    if (aToken.Class == SymToken.TClass.EClassComment)
    {
        return ret;
    }

    // Try to find a new child worker to handle this kind of data.
    SymParserWorker worker = CreateWorkerByTokenType(aToken);
    if (worker != null)
    {
        System.Diagnostics.Debug.WriteLine("SymWorkerBuildFileMain.OfferToken() - FOUND HANDLER FOR: " + aToken.Value);
        AddChild(worker);
        ret = TTokenConsumptionType.ETokenConsumed;
    }
    //
    return ret;
}
// Pushes a token to any subscribed token handlers. Uses the
// null-conditional invocation so the delegate field is read only once: the
// original check-then-invoke pattern can throw NullReferenceException if
// the last handler unsubscribes between the null test and the call.
private void NotifyNewToken(SymToken aToken)
{
    iTokenHandlers?.Invoke(aToken);
}
// Receives events from the grouper mastermind and forwards grouped-token
// notifications to our own observers; all other event types are ignored.
private void MastermindObserver(SymGrouperMastermind.TEvent aEvent, SymToken aGroupedToken)
{
    if (aEvent != SymGrouperMastermind.TEvent.EEventGroupTokenReady)
    {
        return;
    }
    ReportEvent(TEvent.EEventGroupingTokenReady, aGroupedToken);
}
// Caches the token as pending output, unless it triggers a grouping state
// change (in which case the state machinery deals with it instead).
private void EnqueueNewOutputToken(SymToken aToken)
{
    bool triggersStateChange = CheckIfStateChangeRequiredForEnqueuedToken(aToken);
    if (triggersStateChange == false)
    {
        iCache.Append(aToken);
    }
}
// Drains the lexed-token input queue, processing each token in arrival
// order until the queue is (momentarily) empty.
public void PerformGrouping()
{
    for (SymToken token = NextInputToken(); token != null; token = NextInputToken())
    {
        ProcessToken(token);
    }
}
// Maps a preprocessor directive name onto the worker that parses it.
// Unrecognised directives get a consumer worker that swallows tokens up to
// the next new line; "undef" is recognised but currently unhandled.
private SymParserWorker CreateWorkerByTokenType(SymToken aToken)
{
    SymParserWorkerContext context = new SymParserWorkerContext(WorkerContext, this, aToken);
    string directive = aToken.Value;
    //
    SymParserWorker worker;
    if (directive == "define")
    {
        worker = new SymPreProcessorWorkerDefine(context);
    }
    else if (directive == "undef")
    {
        // Recognised but currently unhandled - no worker is created.
        worker = null;
    }
    else if (directive == "include")
    {
        worker = new SymPreProcessorWorkerInclude(context);
    }
    else if (directive == "if")
    {
        worker = new SymPreProcessorWorkerIf(context);
    }
    else if (directive == "ifdef")
    {
        worker = new SymPreProcessorWorkerIfdef(context);
    }
    else if (directive == "ifndef")
    {
        worker = new SymPreProcessorWorkerIfndef(context);
    }
    else if (directive == "else")
    {
        worker = new SymPreProcessorWorkerElse(context);
    }
    else if (directive == "elif")
    {
        worker = new SymPreProcessorWorkerElseIf(context);
    }
    else if (directive == "endif")
    {
        worker = new SymPreProcessorWorkerEndif(context);
    }
    else
    {
        // Skip unhandled preprocessor directives (consume up to new line).
        worker = new SymParserWorkerConsumer(context, SymToken.TClass.EClassNewLine);
    }
    //
    return worker;
}
// Registers a separator token along with its match criteria. The token is
// cloned so the caller's instance is untouched, and the criteria travel via
// the clone's tag. Exact duplicates are not registered twice.
public void RegisterArgumentSeparatorToken(SymToken aToken, SymTokenBalancerMatchCriteria aCriteria)
{
    SymToken taggedCopy = new SymToken(aToken);
    taggedCopy.Tag = aCriteria;
    //
    bool alreadyRegistered = IsTokenExactMatch(taggedCopy, iArgumentSeparators);
    if (alreadyRegistered == false)
    {
        iArgumentSeparators.Append(taggedCopy);
    }
}
// Lexing driver: announces the start, pushes every character from the
// stream through the lexer state machine, then announces completion.
private void DoLexing()
{
    ReportEvent(TEvent.EEventLexingStarted, SymToken.NullToken());
    //
    while (Stream.EOF == false)
    {
        char character = Stream.ReadChar();
        ProcessCharacter(character);
    }
    //
    ReportEvent(TEvent.EEventLexingComplete, SymToken.NullToken());
}
// State handler for the value portion of a #define: a new line terminates
// the value (and is left for the base class); everything else is
// accumulated verbatim.
private SymParserWorker.TTokenConsumptionType OnStateDefineValue(SymToken aToken)
{
    if (aToken.Class == SymToken.TClass.EClassNewLine)
    {
        return SymParserWorker.TTokenConsumptionType.ETokenNotConsumed;
    }
    //
    iTokens.Append(aToken);
    return SymParserWorker.TTokenConsumptionType.ETokenConsumed;
}
// Flushes the characters accumulated so far as a single token, then resets
// the accumulator ready for the next word. Does nothing when no characters
// are buffered.
private void MakeWord()
{
    if (iCurrentWord.Length == 0)
    {
        return;
    }

    // Finished a word - emit it to the lexer.
    SymToken token = new SymToken(iCurrentWord.ToString(), iCurrentClass, Lexer.CurrentPosition);
    Lexer.FlushToken(token);

    // Reset the accumulator and default the class back to whitespace.
    iCurrentWord.Remove(0, iCurrentWord.Length);
    iCurrentClass = SymToken.TClass.EClassWhiteSpace;
}
// Folds the new token into the most recently cached token (re-classifying
// the combined result) and then folds that combined token into the token
// before it. Requires a non-empty cache.
private void MergeWithPreviousTwoTokens(SymToken aNewToken, SymToken.TClass aNewClassType)
{
    System.Diagnostics.Debug.Assert(iCache.Count > 0);

    // Pull the tail of the cache and combine the new token into it.
    SymToken tail = iCache.PopTail();
    tail.Combine(aNewToken);
    tail.Class = aNewClassType;

    // Then combine the result with whatever preceded it (if anything).
    MergeWithPreviousToken(tail);
}
// Dequeues the next lexed token under the lock shared with the lexer
// thread. Returns null when the queue is currently empty.
private SymToken NextInputToken()
{
    lock (iLexedTokens)
    {
        if (iLexedTokens.Count == 0)
        {
            return null;
        }
        return iLexedTokens.Dequeue();
    }
}
// Force-combines the new token into the previous output token. With an
// empty cache there is nothing to merge into, so the token is enqueued as
// new output instead. Tokens that trigger a grouping state change are left
// to the state machinery.
private void ForceMergeWithPreviousToken(SymToken aNewToken)
{
    if (iCache.Count == 0)
    {
        EnqueueNewOutputToken(aNewToken);
        return;
    }
    //
    if (CheckIfStateChangeRequiredForEnqueuedToken(aNewToken) == false)
    {
        SymToken previous = PreviousOutputToken;
        previous.ForceCombine(aNewToken);
    }
}
// Merges the tokens in [aStartIndex, aEndIndex] into the first (combinable,
// unless aForceMerge) token of the range, then removes the tokens that were
// folded in. aMergeInContinuations controls whether continuation tokens are
// merged or skipped; aForceMerge bypasses CombiningAllowed checks.
public void MergeAllTokensWithinRange(int aStartIndex, int aEndIndex, bool aMergeInContinuations, bool aForceMerge)
{
    int count = Count;
    //
    System.Diagnostics.Debug.Assert(count > aStartIndex);
    System.Diagnostics.Debug.Assert(aEndIndex < count);

    // Have to do this in two passes to ensure token
    // text remains from left to right.
    SymToken startingToken = this[aStartIndex++];
    if (aForceMerge == false)
    {
        // Not force-merging, so need to find a valid combinable starting element
        // NOTE(review): '++aStartIndex' increments before indexing, so when
        // the initial starting token is not combinable the element directly
        // after it is never examined as a candidate, and after the skip the
        // first merge pass below begins at startingToken's own index (a
        // self-combine). Suspected off-by-one - verify against callers.
        while (startingToken.CombiningAllowed == false && aStartIndex < aEndIndex)
        {
            startingToken = this[++aStartIndex];
        }
    }

    // First pass - join tokens
    for (int i = aStartIndex; i <= aEndIndex; i++)
    {
        SymToken thisToken = this[i];

        // Ignore continuations during merging
        if (thisToken.Class != SymToken.TClass.EClassContinuation || aMergeInContinuations)
        {
            if (aForceMerge == false)
            {
                startingToken.Combine(thisToken);
            }
            else
            {
                startingToken.ForceCombine(thisToken);
            }
        }
    }

    // Second pass - discard merged tokens.
    // NOTE(review): removal covers aStartIndex..aEndIndex-1, so the token at
    // aEndIndex is combined above yet never removed. Suspected off-by-one -
    // confirm against callers before changing.
    for (int i = aEndIndex - 1; i >= aStartIndex; i--)
    {
        Remove(i);
    }
    //System.Diagnostics.Debug.WriteLine( "Merged: " + startingToken.Value );
}
// State-machine dispatcher for #define parsing. Re-dispatches the same
// token whenever a handler changes state without consuming it, so one token
// can drive several state transitions; once the state is stable and the
// handler still declines the token, it is offered to the base class.
// NOTE(review): if a handler neither consumes the token nor changes state
// and the base class also declines it, this loop would spin forever -
// presumably the state handlers guarantee progress; TODO confirm.
public override SymParserWorker.TTokenConsumptionType OfferToken(SymToken aToken)
{
    SymParserWorker.TTokenConsumptionType ret = SymParserWorker.TTokenConsumptionType.ETokenNotConsumed;
    //
    while (ret == SymParserWorker.TTokenConsumptionType.ETokenNotConsumed)
    {
        TState currentState = State;
        switch (State)
        {
            case TState.EStateInitialWhiteSpace:
                ret = OnStateInitialWhiteSpace(aToken);
                break;
            case TState.EStateDefineName:
                ret = OnStateDefineName(aToken);
                break;
            case TState.EStateDefineArguments:
                ret = OnStateDefineArguments(aToken);
                break;
            case TState.EStateMiddleWhiteSpace:
                ret = OnStateMiddleWhiteSpace(aToken);
                break;
            case TState.EStateDefineValue:
                ret = OnStateDefineValue(aToken);
                break;
            default:
                break;
        }
        TState newState = State;
        bool statesDidNotChange = (currentState == newState);
        if (statesDidNotChange)
        {
            // If the state handlers didn't want the token, then we
            // offer it to the base class instead
            if (ret == SymParserWorker.TTokenConsumptionType.ETokenNotConsumed)
            {
                ret = base.OfferToken(aToken);
            }
        }
    }
    return(ret);
}
// Consumes the tokens forming an #include directive. First classifies the
// include as user ("...") or system (<...>), then accumulates the path text
// into iWorkingIncludePath until the closing quote/bracket fixes the
// definition's location.
public override SymParserWorker.TTokenConsumptionType OfferToken(SymToken aToken)
{
    TTokenConsumptionType ret = TTokenConsumptionType.ETokenNotConsumed;
    System.Diagnostics.Debug.Assert(WorkerContext.Document.CurrentNode is SymNodePreProcessorInclude);

    // Only consume tokens whilst we're unsure of the include type (system vs user).
    if (iIncludeNode.IncludeDefinition.Type == SymIncludeDefinition.TType.ETypeUndefined)
    {
        if (aToken.Class == SymToken.TClass.EClassQuotation)
        {
            // Must be a user include if its a quotation that we're handling
            iIncludeNode.IncludeDefinition.Type = SymIncludeDefinition.TType.ETypeUser;
            ret = TTokenConsumptionType.ETokenConsumed;
        }
        else if (aToken.Class == SymToken.TClass.EClassSymbol && aToken.Value == "<")
        {
            // Consume it, but don't absorb it
            iIncludeNode.IncludeDefinition.Type = SymIncludeDefinition.TType.ETypeSystem;
            ret = TTokenConsumptionType.ETokenConsumed;
        }
    }
    else if (iIncludeNode.IncludeDefinition.Type != SymIncludeDefinition.TType.ETypeUndefined)
    {
        // NOTE(review): this condition is always true inside the 'else'
        // branch; a plain 'else' would be equivalent.
        // Only consume the tokens whilst we've not yet identified the include
        // definition location.
        if (iIncludeNode.IncludeDefinition.Location.Length == 0)
        {
            if (iIncludeNode.IncludeDefinition.Type == SymIncludeDefinition.TType.ETypeSystem && aToken.Class == SymToken.TClass.EClassSymbol && aToken.Value == ">")
            {
                // Closing '>' terminates a system include path.
                iIncludeNode.IncludeDefinition.Location = iWorkingIncludePath.ToString();
            }
            else if (iIncludeNode.IncludeDefinition.Type == SymIncludeDefinition.TType.ETypeUser && aToken.Class == SymToken.TClass.EClassQuotation && aToken.Type != SymToken.TType.ETypeUnidentified)
            {
                // NOTE(review): the extra Type check presumably identifies
                // the closing quote as opposed to other quotation tokens -
                // TODO confirm.
                iIncludeNode.IncludeDefinition.Location = iWorkingIncludePath.ToString();
            }
            else
            {
                iWorkingIncludePath.Append(aToken.Value);
            }
        }
    }

    // Base class will dequeue us once we reach the new line
    // NOTE(review): this overwrites every 'ret' value assigned above, so
    // those earlier assignments are effectively unused.
    ret = base.OfferToken(aToken);
    return(ret);
}
// Returns the number of tokens in this collection whose class, type and
// value all match those of the specified token.
public int CountByType(SymToken aToken)
{
    int matches = 0;
    //
    foreach (SymToken candidate in this)
    {
        bool sameIdentity = (candidate.Class == aToken.Class)
                         && (candidate.Type == aToken.Type)
                         && (candidate.Value == aToken.Value);
        if (sameIdentity)
        {
            ++matches;
        }
    }
    //
    return matches;
}