// Test fixture setup: mocks the Boo token lexer and creates fresh token
// instances so every scenario starts from a clean slate.
public WhenAttemptingToGetATokenFromThePegLexer()
{
    lexer = MockRepository.GenerateMock<BooTokenLexer>();
    token = new TokenInfo();
    pegToken = new ColorizerToken();
}
/// <summary>
/// Builds the declaration (completion) list for the given caret position.
/// Handles ParseReason.CompleteWord, DisplayMemberList, MemberSelect and
/// MemberSelectAndHighlightBraces; any other reason is a caller error.
/// </summary>
/// <param name="view">Text view the request originates from.</param>
/// <param name="line">Caret line.</param>
/// <param name="col">Caret column.</param>
/// <param name="info">Token information at the caret (unused here).</param>
/// <param name="reason">Why the IDE is asking for declarations.</param>
/// <returns>The declarations to show in the completion UI.</returns>
public override Microsoft.VisualStudio.Package.Declarations GetDeclarations(IVsTextView view, int line, int col, TokenInfo info, ParseReason reason)
{
    IList<Declaration> declarations;
    switch (reason)
    {
        case ParseReason.CompleteWord:
            // On an explicit complete-word request, a line whose first token is
            // a Step keyword gets member-style completions; anything else gets
            // the broader completion set.
            var tokenInfo = GetTokenInfoOfFirstTokenOnLine(view, line, col);
            if (tokenInfo.Token == (int)GherkinTerm.Step)
                declarations = resolver.FindMembers(StepProvider, Grammar, line, col);
            else
                declarations = resolver.FindCompletions(StepProvider, Grammar, line, col);
            break;
        case ParseReason.DisplayMemberList:
        case ParseReason.MemberSelect:
        case ParseReason.MemberSelectAndHighlightBraces:
            declarations = resolver.FindMembers(StepProvider, Grammar, line, col);
            break;
        default:
            // BUG FIX: the original threw ArgumentException("reason"), which puts
            // the parameter NAME into the exception MESSAGE slot.
            // ArgumentOutOfRangeException derives from ArgumentException (existing
            // catch blocks still match) and carries both the parameter name and
            // the offending value.
            throw new ArgumentOutOfRangeException("reason", reason, "Unsupported parse reason.");
    }
    return new Declarations(declarations);
}
/// <summary>
/// Parses the next language token from the remaining source line and reports
/// its color and span. Returns false once the line has been fully consumed.
/// </summary>
/// <param name="tokenInfo">Receives color, type and span of the token (may be null).</param>
/// <param name="state">The scanner's current state value (unused here).</param>
/// <returns>True when a token was produced; false when no more tokens remain on the line.</returns>
public bool ScanTokenAndProvideInfoAboutIt(TokenInfo tokenInfo, ref int state)
{
    // Nothing left on the line -> nothing to parse.
    if (sourceString.Length == 0)
        return false;

    TokenColor matchedColor = TokenColor.Text;
    int matchedLength = 0;

    // Classify the next token by matching the remaining text against the
    // pattern correspondence table.
    MatchRegEx(sourceString, ref matchedLength, ref matchedColor);

    if (tokenInfo != null)
    {
        tokenInfo.Color = matchedColor;
        tokenInfo.Type = TokenType.Text;
        tokenInfo.StartIndex = currentPos;
        // Max() keeps EndIndex >= StartIndex even for a zero-length match.
        tokenInfo.EndIndex = Math.Max(currentPos, currentPos + matchedLength - 1);
    }

    // Advance past the consumed characters and keep only the unprocessed tail.
    currentPos += matchedLength;
    sourceString = sourceString.Substring(matchedLength);
    return true;
}
// Verifies that compiling "a as bool" maps position (0,5) to a
// MappedTypeReference whose tooltip reads "struct bool" and whose span covers
// columns 5-9 of line 0; then smoke-tests Goto and GetDeclarations.
public void BoolTypeReference()
{
    var results = RunCompiler(@"a as bool");
    var mToken = results.GetMappedToken(0, 5);

    TextSpan ts = new TextSpan();
    TextSpan expected = new TextSpan
    {
        iStartLine = 0,
        iEndLine = 0,
        iStartIndex = 5,
        iEndIndex = 9
    };

    Assert.NotNull(mToken);
    Assert.AreEqual(2, mToken.Nodes.Count);
    Assert.IsInstanceOf(typeof(MappedTypeReference), mToken.Nodes[1]);
    Assert.AreEqual("struct bool", mToken.GetDataTiptext(out ts));
    Assert.AreEqual(ts, expected);

    // Exercise navigation and declaration lookup for coverage.
    mToken.Goto(out ts);
    TokenInfo ti = new TokenInfo();
    ParseReason pr = new ParseReason();
    mToken.GetDeclarations(ti, pr);
}
/// <summary>
/// Supplies the next colorizer token for the current line. Tokens queued by a
/// previous call (see ProcessStepIdentifiers) are drained before the Irony
/// scanner is asked for fresh input.
/// </summary>
/// <param name="tokenInfo">Receives span, type, color and trigger data.</param>
/// <param name="state">Scanner state cached per line by the buffer.</param>
/// <returns>True while tokens remain on this line; false at EOL/EOF or for error tokens.</returns>
public bool ScanTokenAndProvideInfoAboutIt(TokenInfo tokenInfo, ref int state)
{
    // Queued tokens take priority over reading new input.
    if (_queuedTokens.Count > 0)
    {
        UpdateTokenInfoFromQueue(tokenInfo);
        return Returning(tokenInfo, state, _previousToken, true);
    }
    Token token = _parser.Scanner.VsReadToken(ref state);
    // Only surface real tokens: skip null/EOF, error-category and zero-length tokens.
    if (token != null && token.Terminal != GherkinGrammar.CurrentGrammar.Eof && token.Category != TokenCategory.Error && token.Length > 0)
    {
        tokenInfo.StartIndex = token.Location.Position;
        tokenInfo.EndIndex = tokenInfo.StartIndex + token.Length - 1; // inclusive end
        var gherkinKeyTerm = token.KeyTerm as GherkinKeyTerm;
        if (gherkinKeyTerm != null)
            tokenInfo.Token = (int)gherkinKeyTerm.Term;
        SetColorAndType(token, tokenInfo);
        SetTrigger(token, tokenInfo);
        // NOTE(review): presumably this may enqueue extra tokens into
        // _queuedTokens for subsequent calls — confirm against its definition.
        ProcessStepIdentifiers(token, tokenInfo);
        return Returning(tokenInfo, state, token, true);
    }
    return Returning(tokenInfo, state, token, false);
}
/// <summary>
/// Pulls the next token from the underlying lexer and translates it into VS
/// colorizer data. Returns false when no lexer is set, when the lexer yields
/// nothing, or (after filling color/type/trigger) when the scan has no token.
/// </summary>
public bool ScanTokenAndProvideInfoAboutIt(TokenInfo tokenInfo, ref int state)
{
    if (_lexer == null)
        return false;

    ScanTokenInfo scanned = _lexer.GetToken((ScanState)state);
    if (scanned == null)
        return false;

    // Persist lexer state and remember color/colorize-end for later use.
    state = (int)scanned.State;
    _lastColor = (TokenColor)scanned.Color;
    _colorizeEnd = scanned.ColorizeEnd;

    tokenInfo.Color = _lastColor;
    tokenInfo.Type = (TokenType)scanned.Type;
    tokenInfo.Trigger = (TokenTriggers)scanned.Triggers;

    if (scanned.Token == null)
        return false;

    // Lexer columns are 1-based; VS spans are 0-based and inclusive.
    tokenInfo.StartIndex = scanned.Token.Location.Column - 1;
    tokenInfo.EndIndex = scanned.Token.Location.EndColumn - 2;
    return !scanned.IsEndOfLine;
}
/// <summary>
/// Scans the next token starting at the current offset, dispatching to the
/// handler for the current parse state, then advances the offset past the
/// token's end.
/// </summary>
/// <param name="tokenInfo">Receives the token's end index.</param>
/// <param name="state">Current parse state; updated by the chosen handler.</param>
/// <returns>True when a token was found; false at end of source or for
/// states whose handlers report no token.</returns>
public bool GetNextToken(TokenInfo tokenInfo, ref ParseState state)
{
    // Past the end of the source: nothing left to scan.
    if (_offset >= _source.Length)
        return false;

    bool foundToken = false;
    int endIndex = -1;

    switch (state)
    {
        case ParseState.InSingleQuotes:
            state = HandleSingleQuotes(out endIndex, ref foundToken);
            break;
        case ParseState.InDoubleQuotes:
            state = HandleDoubleQuotes(out endIndex, ref foundToken);
            break;
        case ParseState.InSingleQuoteHeredoc:
            state = HandleHeredoc('\'', state, out endIndex);
            break;
        case ParseState.InDoubleQuoteHeredoc:
            state = HandleHeredoc('"', state, out endIndex);
            break;
        case ParseState.InBlockComment:
            state = HandleBlockComment(out endIndex);
            break;
        case ParseState.InText:
            state = HandleToken(ref foundToken, ref endIndex);
            break;
    }

    // Record where the token ended and resume just past it next call.
    tokenInfo.EndIndex = endIndex;
    _offset = endIndex + 1;
    return foundToken;
}
// State argument: http://social.msdn.microsoft.com/Forums/en-US/vsx/thread/38939d76-6f8b-473f-9ee1-fc3ae7b59cce
/// <summary>
/// Hands the next pre-parsed token to the VS colorizer, mapping its type to
/// color/type via TokenMap; unmapped types fall back to Unknown/Text.
/// </summary>
public bool ScanTokenAndProvideInfoAboutIt(MVSP.TokenInfo tokenInfo, ref int state)
{
    // All tokens for the line have been consumed.
    if (_tokenIdx == _parser.Tokens.Count)
        return false;

    var current = _parser.Tokens[_tokenIdx];
    _tokenIdx++;

    tokenInfo.Token = (int)current.Type;
    tokenInfo.StartIndex = current.SrcPosStart;
    tokenInfo.EndIndex = current.SrcPosEnd;

    if (TokenMap.TryGetValue(current.Type, out var mapped))
    {
        tokenInfo.Type = mapped.Type;
        tokenInfo.Color = mapped.Color;
    }
    else
    {
        // Unrecognized token type: render as plain text.
        tokenInfo.Type = MVSP.TokenType.Unknown;
        tokenInfo.Color = MVSP.TokenColor.Text;
    }
    return true;
}
/// <summary>
/// Computes zero-based start/end indices of a Boo token for the VS colorizer.
/// ANTLR columns are 1-based, so spans shift left by one; quoted strings get
/// extra width — presumably to cover their delimiters (confirm against the
/// lexer) — and EOF (type 1) is left untouched.
/// </summary>
public void ResolveBooTokenStartAndEndIndex(antlr.CommonToken token, TokenInfo tokenInfo)
{
    string text = token.getText();
    int textLength = text == null ? 0 : text.Length;
    int startIndex = token.getColumn() - 1;

    if (token.Type == BooLexer.SINGLE_QUOTED_STRING || token.Type == BooLexer.DOUBLE_QUOTED_STRING)
    {
        // End extends one column past the text — presumably the closing quote.
        tokenInfo.StartIndex = startIndex;
        tokenInfo.EndIndex = token.getColumn() + textLength;
    }
    else if (token.Type == BooLexer.TRIPLE_QUOTED_STRING)
    {
        // Extra 6 columns — presumably the opening/closing triple quotes.
        tokenInfo.StartIndex = startIndex;
        tokenInfo.EndIndex = startIndex + 5 + text.Length;
    }
    else if (token.Type == 1)
    {
        // EOF: nothing to resolve.
        return;
    }
    else
    {
        tokenInfo.StartIndex = startIndex;
        tokenInfo.EndIndex = startIndex + (text.Length - 1);
    }
}
/// <summary>
/// Colors one line of text: pre-fills every cell with the default text color
/// (uninitialized cells otherwise show random color junk), then runs the
/// scanner over the line and paints each token's span with its color.
/// Scanner failures are swallowed so rendering never breaks.
/// </summary>
/// <returns>The scanner state at the end of the line.</returns>
public override int ColorizeLine(int line, int length, IntPtr ptr, int state, uint[] attrs)
{
    if (attrs == null)
        return state;

    // Default every cell first so unscanned positions are plain text.
    for (int i = 0; i < attrs.Length; i++)
        attrs[i] = (uint)TokenColor.Text;

    if (this.Scanner == null)
        return state;

    try
    {
        string text = Marshal.PtrToStringUni(ptr, length);
        this.Scanner.SetSource(text, 0);

        TokenInfo tokenInfo = new TokenInfo();
        tokenInfo.EndIndex = -1;
        while (this.Scanner.ScanTokenAndProvideInfoAboutIt(tokenInfo, ref state))
        {
            // Paint the token's span, clamped to the attribute array bounds.
            for (int pos = tokenInfo.StartIndex; pos <= tokenInfo.EndIndex; pos++)
            {
                if (pos >= 0 && pos < attrs.Length)
                    attrs[pos] = (uint)tokenInfo.Color;
            }
        }
    }
    catch (Exception)
    {
        // Best effort: ignore scanner exceptions during colorization.
    }
    return state;
}
/// <summary>
/// Reads the next token for the current line via the Irony scanner and copies
/// its editor info (color, type, triggers) into the VS token structure.
/// Returns false at EOL/EOF or for error-category tokens.
/// </summary>
public bool ScanTokenAndProvideInfoAboutIt(TokenInfo tokenInfo, ref int state)
{
    Token token = parser.Scanner.VsReadToken(ref state);

    // A read failure, EOF, or an error token ends scanning for this line.
    if (token == null || token.Terminal == Grammar.CurrentGrammar.Eof || token.Category == TokenCategory.Error)
        return false;

    tokenInfo.StartIndex = token.Location.Position;
    tokenInfo.EndIndex = tokenInfo.StartIndex + token.Length - 1;

    if (token.EditorInfo != null)
    {
        tokenInfo.Color = (Microsoft.VisualStudio.Package.TokenColor)token.EditorInfo.Color;
        tokenInfo.Type = (Microsoft.VisualStudio.Package.TokenType)token.EditorInfo.Type;
    }

    // Key terms carry their own trigger info; otherwise fall back to the token's.
    if (token.KeyTerm != null && token.KeyTerm.EditorInfo != null)
        tokenInfo.Trigger = (Microsoft.VisualStudio.Package.TokenTriggers)token.KeyTerm.EditorInfo.Triggers;
    else if (token.EditorInfo != null)
        tokenInfo.Trigger = (Microsoft.VisualStudio.Package.TokenTriggers)token.EditorInfo.Triggers;

    return true;
}
// Test fixture setup: a partial mock of the PEG lexer plus fresh token
// instances for each scenario.
public WhenAttemptingToGetATokenFromThePegLexer()
{
    lexer = Mocks.PartialMock<PegLexer>();
    token = new TokenInfo();
    pegToken = new PegToken();
}
/// <summary>
/// Reads the next Boo token for the current line and fills in colorizer info.
/// State value 13 means "inside a multi-line comment"; while in that state
/// tokens are forced to comment type/color until the lexer marks the end of
/// the comment region via the token's filename sentinel.
/// </summary>
/// <param name="tokenInfo">Receives span, type and color for the token.</param>
/// <param name="state">Per-line scanner state (13 = inside ML_COMMENT).</param>
/// <returns>True while more tokens remain on the line; false at EOF.</returns>
public bool ScanTokenAndProvideInfoAboutIt(TokenInfo tokenInfo, ref int state)
{
    try
    {
        _reusableToken = lexer.nextToken() as antlr.CommonToken;
    }
    catch (antlr.TokenStreamRecognitionException e)
    {
        // Deliberately swallowed: half-typed code raises lexer errors constantly.
        // NOTE(review): _reusableToken then keeps its value from the previous
        // call (or null on the very first one) — confirm this is intended.
    }
    // Resolve start/end indices first; later branches may overwrite them.
    ResolveBooTokenStartAndEndIndex(_reusableToken, tokenInfo);
    // Type 1 is the ANTLR EOF token: this line is done.
    if (_reusableToken.Type == 1)
    {
        tokenInfo.Type = TokenType.WhiteSpace;
        // Still inside a multi-line comment: paint the entire line as comment.
        if (state == 13)
        {
            tokenInfo.StartIndex = 0;
            tokenInfo.EndIndex = _currentLine.Length;
            tokenInfo.Type = TokenType.Comment;
            tokenInfo.Color = TokenColor.Comment;
        }
        return false;
    }
    else if (state == 13)
    {
        // Carry the multi-line-comment state onto this token.
        _reusableToken.Type = BooLexer.ML_COMMENT;
    }
    if (_reusableToken.Type == BooLexer.ML_COMMENT)
    {
        state = 13;
        // The lexer signals leaving an ML_COMMENT region by setting the
        // token's filename to this sentinel value.
        if (_reusableToken.getFilename().Equals("LEAVINGML_COMMENT"))
        {
            state = 0;
        }
        // Guard against an endless loop in the parser when the token starts
        // at/after the end of the line.
        // NOTE(review): inside this branch Type == ML_COMMENT, so the
        // `Type == 120` test only matches if ML_COMMENT happens to be 120 —
        // verify whether this guard is reachable.
        if (_reusableToken.Type == 120 && _reusableToken.getColumn() >= _currentLine.Length)
        {
            tokenInfo.Type = TokenType.WhiteSpace;
            return false;
        }
    }
    // Map the token type to IDE color and type.
    ResolveBooTokenTypeAndColor(_reusableToken, tokenInfo);
    return true;
}
/// <summary>
/// Colors one line: scans tokens and writes each token's color into attrs,
/// filling the gaps before, between, and after tokens with the default text
/// color. Returns the scanner state at the end of the line.
/// </summary>
public virtual int ColorizeLine(int line, int length, IntPtr ptr, int state, uint[] attrs)
{
    if (this.languageService == null) return 0;
    if (this.scanner == null) return 0;
    string text = Marshal.PtrToStringUni(ptr, length);
    this.scanner.SetSource(text, 0);
    TokenInfo tokenInfo = new TokenInfo();
    tokenInfo.endIndex = -1;
    bool firstTime = true;
    int linepos = 0;
    while (this.scanner.ScanTokenAndProvideInfoAboutIt(tokenInfo, ref state)){
        if (firstTime){
            // Leading gap before the first token defaults to plain text.
            // NOTE(review): this loop stops at startIndex-1 (exclusive); the
            // remaining cell is covered by the gap loop below, so together
            // they fill [0, startIndex) — confirm the -1 is intentional.
            if (attrs != null && tokenInfo.startIndex > 0) {
                for (linepos = 0; linepos < tokenInfo.startIndex-1; linepos++)
                    attrs[linepos] = (uint)TokenColor.Text;
            }
            firstTime = false;
        }
        if (attrs != null){
            // Gap between the previous token and this one: default color.
            for (; linepos < tokenInfo.startIndex; linepos++)
                attrs[linepos] = (uint)TokenColor.Text;
            // The token's own (inclusive) span: its color.
            for (; linepos <= tokenInfo.endIndex; linepos++)
                attrs[linepos] = (uint)tokenInfo.color;
        }
    }
    // Trailing remainder of the line defaults to plain text.
    if (linepos < length-1 && attrs != null) {
        for (; linepos < length; linepos++)
            attrs[linepos] = (uint)TokenColor.Text;
    }
    return state;
}
// Test fixture setup: a concrete scanner plus fresh token instances on both
// sides of the translation (colorizer token in, IDE token out).
public WhenTranslatingTokens()
{
    pegToken = new ColorizerToken();
    ideToken = new TokenInfo();
    scanner = new BooScanner();
}
/// <summary>
/// Scans the next shader token and maps it to a VS color/type pair.
/// Numeric color values 6/7/8 are custom slots for intrinsics, special
/// keywords and preprocessor directives (original TODO: replace the magic
/// values with an NShaderTokenColor enum).
/// </summary>
public bool ScanTokenAndProvideInfoAboutIt(TokenInfo tokenInfo, ref int state)
{
    int start, end;
    ShaderToken token = (ShaderToken)lex.GetNext(ref state, out start, out end);

    // EOF ends scanning for this line.
    if (token == ShaderToken.EOF)
        return false;

    tokenInfo.StartIndex = start;
    tokenInfo.EndIndex = end;

    TokenColor color;
    TokenType type;
    switch (token)
    {
        case ShaderToken.KEYWORD:
        case ShaderToken.TYPE:
        case ShaderToken.KEYWORD_FX:
            color = TokenColor.Keyword;
            type = TokenType.Keyword;
            break;
        case ShaderToken.COMMENT:
            color = TokenColor.Comment;
            type = TokenType.Comment;
            break;
        case ShaderToken.COMMENT_LINE:
            color = TokenColor.Comment;
            type = TokenType.LineComment;
            break;
        case ShaderToken.NUMBER:
        case ShaderToken.FLOAT:
            color = TokenColor.Number;
            type = TokenType.Literal;
            break;
        case ShaderToken.STRING_LITERAL:
            color = TokenColor.String;
            type = TokenType.Literal;
            break;
        case ShaderToken.INTRINSIC:
            color = (TokenColor)6; // custom color slot
            type = TokenType.Identifier;
            break;
        case ShaderToken.KEYWORD_SPECIAL:
            color = (TokenColor)7; // custom color slot
            type = TokenType.Identifier;
            break;
        case ShaderToken.PREPROCESSOR:
            color = (TokenColor)8; // custom color slot
            type = TokenType.Keyword;
            break;
        default:
            color = TokenColor.Text;
            type = TokenType.Text;
            break;
    }

    tokenInfo.Color = color;
    tokenInfo.Type = type;
    return true;
}
// Finds the single token of the given type among the parsed tokens and
// stores it in 'trimmed'; fails the test if the count is not exactly one.
protected void BuildTrimmed(TokenType type)
{
    List<TokenInfo> trimList = tokens.Where(i => i.Type == type).ToList();
    Assert.IsTrue(trimList.Count == 1, "Expected 1, actual " + trimList.Count.ToString());
    trimmed = trimList[0];
}
// Declarations (IntelliSense completion) are not supported by this language
// service; returning null tells the shell to show nothing.
public override Declarations GetDeclarations(MVTI.IVsTextView view, int line, int col, TokenInfo info, ParseReason reason) { return null; }
}//SetSource

/// <summary>
/// Stub scanner implementation: never reports any tokens. The guard clause
/// documents the expected inputs, but the result is false either way.
/// </summary>
/// <param name="token">Token structure (nothing is filled in).</param>
/// <param name="state">Scanner state (unused).</param>
/// <returns>Always false: no tokens on the line.</returns>
public bool ScanTokenAndProvideInfoAboutIt( TokenInfo token, ref int state )
{
    if( (token == null) || (Line == null) )
    {
        return false;
    }
    return false;
}//ScanTokenAndProvideInfoAboutIt
/// <summary>
/// Method-tip (parameter help) request. Currently delegates entirely to the
/// base implementation; the line below is the remnant of an unfinished
/// async-parse version, kept for reference.
/// </summary>
public override void MethodTip(IVsTextView textView, int line, int index, TokenInfo info)
{
    base.MethodTip(textView, line, index, info);
    // BeginParse(line, index, info, ParseReason.MethodTip, textView, new ParseResultHandler(HandleMethodTipResponse));
}
/// <summary>
/// Fetches the next token from the PEG lexer, translates it into IDE token
/// info, and reports whether more tokens remain on the line.
/// </summary>
public bool ScanTokenAndProvideInfoAboutIt(TokenInfo tokenInfo, ref int state)
{
    Lexer.NextToken(colorizerToken, ref state);
    TranslatePegToken(colorizerToken, tokenInfo);
    // EOL is not surfaced to the IDE; it simply ends scanning for this line.
    return colorizerToken.Type != PegTokenType.EOL;
}
/// <summary>
/// Minimal scanner: reports the entire source as a single keyword token.
/// EndIndex doubles as an "already scanned" marker, so the second call on
/// the same TokenInfo returns false.
/// </summary>
public bool ScanTokenAndProvideInfoAboutIt(TokenInfo tokenInfo, ref int state)
{
    // Whole source already reported on a previous call.
    if (tokenInfo.EndIndex == src.Length)
        return false;

    tokenInfo.Color = TokenColor.Keyword;
    tokenInfo.Type = TokenType.Keyword;
    tokenInfo.Trigger = TokenTriggers.None;
    tokenInfo.EndIndex = src.Length;
    return true;
}
/// <summary>
/// Copies a token's span and its configured color/type/trigger data into the
/// TokenInfo structure used by the colorizer.
/// </summary>
/// <param name="tokenInfo">Structure to populate.</param>
/// <param name="token">Token id to look up in the configuration.</param>
/// <param name="start">Inclusive start index of the token.</param>
/// <param name="end">Inclusive end index of the token.</param>
/// <returns>Always true.</returns>
private bool processToken(TokenInfo tokenInfo, int token, int start, int end)
{
    tokenInfo.StartIndex = start;
    tokenInfo.EndIndex = end;

    // Color, type and trigger all come from the token's configured definition.
    Configuration.TokenDefinition tokenDef = Configuration.GetDefinition(token);
    tokenInfo.Color = tokenDef.TokenColor;
    tokenInfo.Type = tokenDef.TokenType;
    tokenInfo.Trigger = tokenDef.TokenTriggers;

    return true;
}
// The caller first invokes SetSource() with the line to parse, then calls
// this method repeatedly until it returns false. The state value is cached
// per line by the buffer so multi-line constructs can carry state across
// lines. This implementation is a stub: it classifies nothing and stops
// immediately.
bool IScanner.ScanTokenAndProvideInfoAboutIt(TokenInfo tokenInfo, ref int state)
{
    tokenInfo.Color = TokenColor.Text;
    tokenInfo.Type = TokenType.Unknown;
    return false; // false -> no more tokens on this line
}
// Builds tokens for a line ending in a single '//' comment and captures that
// comment token for the assertions in this fixture.
public override void SetUp()
{
    base.SetUp();
    //                0         1         2         3         4
    //                012345678901234567890123456789012345678901
    rawCodeString = "someMethodCall('a string') // line Comment";
    BuildTokens(rawCodeString);

    List<TokenInfo> commentTokens = tokens.Where(i => i.Type == TokenType.Comment).ToList();
    Assert.IsTrue(commentTokens.Count == 1);
    lineCommentToken = commentTokens[0];
}
// Fixture setup: tokenizes a line ending in a single '//' comment and
// captures that comment token for the scenarios in this class.
public WhenParsingCodeStringsWithDoubleWhackLineComments()
{
    //                0         1         2         3         4
    //                012345678901234567890123456789012345678901
    rawCodeString = "someMethodCall('a string') // line Comment";
    BuildTokens(rawCodeString);

    List<TokenInfo> commentTokens = tokens.Where(i => i.Type == TokenType.Comment).ToList();
    Assert.True(commentTokens.Count == 1);
    lineCommentToken = commentTokens[0];
}
// A second '//' inside an existing line comment must not start a new token:
// the entire tail of the line parses as one comment.
public void ShouldParseAttemptedDoubleLineCommentAsOneLineComment()
{
    //                0         1         2         3         4         5         6
    //                01234567890123456789012345678901234567890123456789012345678901234
    rawCodeString = "someMethodCall('a string') // line Comment // double line comment";
    BuildTokens(rawCodeString);

    List<TokenInfo> commentTokens = tokens.Where(i => i.Type == TokenType.Comment).ToList();
    Assert.True(commentTokens.Count == 1, "Expected 1, actual: " + commentTokens.Count.ToString());
    lineCommentToken = commentTokens[0];
}
/// <summary>
/// Repeatedly scans the given line and collects every token the scanner
/// reports. Collection stops as soon as the scanner returns false, so no
/// EOL pseudo-token is ever added (the IDE does not care about it anyway).
/// </summary>
public virtual void BuildTokens(string codeLine, int startOffset)
{
    int state = 0;
    scanner.SetSource(codeLine, startOffset);
    while (true)
    {
        TokenInfo token = new TokenInfo();
        if (!scanner.ScanTokenAndProvideInfoAboutIt(token, ref state))
            break;
        tokens.Add(token);
    }
}
/// <summary>
/// Thin wrapper: delegates to GetNextToken at the current offset and, when a
/// token is found, advances the offset to just past its end.
/// </summary>
public bool ScanTokenAndProvideInfoAboutIt(TokenInfo tokenInfo, ref int state)
{
    if (tokenInfo == null)
        return false;

    bool found = GetNextToken(_offset, tokenInfo, ref state);
    if (found)
        _offset = tokenInfo.EndIndex + 1;
    return found;
}
/// <summary>
/// Tokenizes the next token on the line. State value 1 means "inside a block
/// comment"; it is re-derived after each token based on whether the comment
/// has been closed by '*/'.
/// </summary>
bool IScanner.ScanTokenAndProvideInfoAboutIt(TokenInfo tokenInfo, ref int state)
{
    tokenizer.BlockComment = (state == 1);

    string token = tokenizer.NextToken();
    if (token == null)
        return false;

    token = token.Trim();
    tokenInfo.StartIndex = tokenizer.StartIndex;
    tokenInfo.EndIndex = tokenizer.StopIndex;
    tokenInfo.Type = GetTokenType(token);
    tokenInfo.Color = GetTokenColor(tokenInfo);

    // Remain in state 1 while the block comment is still open.
    state = (tokenizer.BlockComment && !token.EndsWith("*/")) ? 1 : 0;
    return true;
}
/// <summary>
/// Maps a token's type to the display color used by the colorizer;
/// unrecognized types render as plain text.
/// </summary>
private TokenColor GetTokenColor(TokenInfo ti)
{
    if (ti.Type == TokenType.Comment || ti.Type == TokenType.LineComment)
        return TokenColor.Comment;
    if (ti.Type == TokenType.Literal)
        return TokenColor.String;
    if (ti.Type == TokenType.Keyword)
        return TokenColor.Keyword;
    return TokenColor.Text;
}
// State argument: http://social.msdn.microsoft.com/Forums/en-US/vsx/thread/38939d76-6f8b-473f-9ee1-fc3ae7b59cce
/// <summary>
/// Hands the next pre-parsed token to the VS colorizer, mapping its type to
/// color/type info via mTokenMap.
/// NOTE(review): the mTokenMap lookup is unguarded — a token type absent
/// from the map will throw (key-not-found or index-out-of-range, depending
/// on mTokenMap's concrete type). A guarded lookup with an Unknown/Text
/// fallback, as done in a sibling scanner elsewhere in this codebase, would
/// be more robust — consider aligning.
/// </summary>
bool MVSP.IScanner.ScanTokenAndProvideInfoAboutIt(MVSP.TokenInfo aTokenInfo, ref int aState)
{
    // All tokens for the line have been consumed.
    if (mTokenIdx == mParser.Tokens.Count)
    {
        return(false);
    }
    var xToken = mParser.Tokens[mTokenIdx];
    mTokenIdx++;
    aTokenInfo.Token = (int)xToken.Type;
    aTokenInfo.StartIndex = xToken.SrcPosStart;
    aTokenInfo.EndIndex = xToken.SrcPosEnd;
    var xTokenData = mTokenMap[(int)xToken.Type];
    aTokenInfo.Type = xTokenData.Type;
    aTokenInfo.Color = xTokenData.Color;
    return(true);
}