/// <summary>
/// Lexes the [sourceStart, sourceStart+length) slice of <paramref name="text"/> with the C# lexer
/// and appends one classification tag per recognized token (comments, keywords, literals,
/// symbols/operators) to <paramref name="tags"/>.
/// </summary>
/// <param name="tags">Destination list that receives the created classification tag spans.</param>
/// <param name="text">Full buffer text; only the requested slice is analyzed.</param>
/// <param name="sourceStart">Absolute offset of the slice within <paramref name="text"/>.</param>
/// <param name="length">Length of the slice to analyze.</param>
protected void FindCSharpTokens(List<ITagSpan<IClassificationTag>> tags, string text, int sourceStart, int length)
{
    var messages = new MessageList();
    var lexer = new CSLexer(new StringSource(text.Substring(sourceStart, length)), messages);
    lexer.AnalyzeAll();

    foreach (var token in lexer.Tokens)
    {
        // Map the token category onto a highlight type; tokens that match none are skipped.
        NfxTokenTypes? tokenType =
            token.IsComment                    ? NfxTokenTypes.Comment :
            token.IsKeyword                    ? NfxTokenTypes.KeyWord :
            token.IsLiteral                    ? NfxTokenTypes.Literal :
            token.IsSymbol || token.IsOperator ? NfxTokenTypes.Brace   :
            (NfxTokenTypes?)null;

        if (tokenType.HasValue)
        {
            // CharNumber is 1-based; convert back to a 0-based absolute buffer offset.
            tags.Add(CreateTagSpan(sourceStart + token.StartPosition.CharNumber - 1, token.Text.Length, tokenType.Value));
        }
    }
}
/// <summary>
/// Parses m_Pattern into m_Chunks using the C# lexer. Plain text becomes literal chunks,
/// '{...}' spans become variable chunks (with optional '*' wildcard and 'name=default' form),
/// '/' becomes a path-divider chunk, and '?' switches chunk portion from Path to Query.
/// Throws WaveException if any content follows a wildcard capture.
/// </summary>
private void parse()
{
    var tokens = new CSLexer(new StringSource(m_Pattern), throwErrors: true).ToList();

    m_Chunks = new List<chunk>();

    var sawWildcard = false;
    var accumulator = string.Empty;
    var currentPortion = chunkPortion.Path;
    var capturing = false;

    // Emits the accumulated text as a chunk (literal, or variable when capturing) and clears the buffer.
    Action emit = () =>
    {
        accumulator = accumulator.Trim();
        if (accumulator.Length == 0) return;

        // Nothing may follow a wildcard capture in the pattern.
        if (sawWildcard) throw new WaveException(StringConsts.URI_WILDCARD_PARSE_ERROR);

        if (capturing)
        {
            // A leading '*' marks a wildcard variable; "ALL" is its implicit name.
            var isWildcard = accumulator.StartsWith("*");
            if (isWildcard)
            {
                accumulator = accumulator.Remove(0, 1).Trim();
                if (accumulator.Length == 0) accumulator = "ALL";
            }

            // "name=default" supplies an optional default value for the variable.
            var parts = accumulator.Split('=');
            m_Chunks.Add(parts.Length == 2
                ? new chunk { Name = parts[0], DefaultValue = parts[1], Portion = currentPortion, IsVar = true, IsWildcard = isWildcard }
                : new chunk { Name = accumulator, Portion = currentPortion, IsVar = true, IsWildcard = isWildcard });

            if (isWildcard) sawWildcard = true;
        }
        else
        {
            m_Chunks.Add(new chunk { Name = Uri.UnescapeDataString(accumulator), Portion = currentPortion });
        }

        accumulator = string.Empty;
    };

    foreach (var token in tokens)
    {
        if (!token.IsPrimary) continue; // skip comments etc.

        if (!capturing && token.Type == CSTokenType.tBraceOpen)
        {
            emit();
            capturing = true;
        }
        else if (capturing && token.Type == CSTokenType.tBraceClose)
        {
            emit();
            capturing = false;
        }
        else if (capturing)
        {
            accumulator += token.Text; // everything inside '{...}' is collected verbatim
        }
        else if (token.Type == CSTokenType.tDiv)
        {
            emit();
            m_Chunks.Add(new chunk { IsPathDiv = true, Name = "/", Portion = currentPortion });
        }
        else if (token.Type == CSTokenType.tTernaryIf)
        {
            // '?' ends the path portion and starts the query portion.
            emit();
            currentPortion = chunkPortion.Query;
        }
        else
        {
            accumulator += token.Text;
        }
    }

    emit(); // flush any trailing text
}
/// <summary>
/// Parses m_Pattern into m_Chunks via the C# lexer: literal text chunks, '{...}' variable
/// captures (leading '*' = wildcard, 'name=default' = default value), '/' path dividers,
/// and '?' switching the portion from Path to Query. A WaveException is raised if any
/// pattern content appears after a wildcard capture.
/// </summary>
private void parse()
{
    var source = new StringSource(m_Pattern);
    var lexer = new CSLexer(source, throwErrors: true);
    var tokens = lexer.ToList();

    m_Chunks = new List<chunk>();

    var wildcardSeen = false;
    var pending = string.Empty;
    var portion = chunkPortion.Path;
    var inCapture = false;

    // Flushes the pending text into m_Chunks as either a literal or a captured variable.
    void FlushPending()
    {
        pending = pending.Trim();
        if (pending.Length == 0) return;

        if (wildcardSeen)
            throw new WaveException(StringConsts.URI_WILDCARD_PARSE_ERROR); // nothing may follow a wildcard

        if (!inCapture)
        {
            // Plain pattern text: store it unescaped as a literal chunk.
            m_Chunks.Add(new chunk { Name = Uri.UnescapeDataString(pending), Portion = portion });
            pending = string.Empty;
            return;
        }

        // Captured variable: leading '*' denotes a wildcard, "ALL" being the implicit name.
        var wildcard = pending.StartsWith("*");
        if (wildcard)
        {
            pending = pending.Remove(0, 1).Trim();
            if (pending.Length == 0) pending = "ALL";
        }

        // "name=default" carries an optional default value.
        var pieces = pending.Split('=');
        if (pieces.Length == 2)
            m_Chunks.Add(new chunk { Name = pieces[0], DefaultValue = pieces[1], Portion = portion, IsVar = true, IsWildcard = wildcard });
        else
            m_Chunks.Add(new chunk { Name = pending, Portion = portion, IsVar = true, IsWildcard = wildcard });

        if (wildcard) wildcardSeen = true;

        pending = string.Empty;
    }

    for (var i = 0; i < tokens.Count; i++)
    {
        var token = tokens[i];
        if (!token.IsPrimary) continue; // skip comments etc.

        // Brace transitions toggle capture mode, flushing whatever was accumulated so far.
        if (!inCapture && token.Type == CSTokenType.tBraceOpen)  { FlushPending(); inCapture = true;  continue; }
        if (inCapture  && token.Type == CSTokenType.tBraceClose) { FlushPending(); inCapture = false; continue; }

        if (inCapture) { pending += token.Text; continue; }

        if (token.Type == CSTokenType.tDiv)
        {
            FlushPending();
            m_Chunks.Add(new chunk { IsPathDiv = true, Name = "/", Portion = portion });
            continue;
        }

        if (token.Type == CSTokenType.tTernaryIf)
        {
            // '?' marks the start of the query portion of the pattern.
            FlushPending();
            portion = chunkPortion.Query;
            continue;
        }

        pending += token.Text;
    }

    FlushPending(); // flush any trailing text
}
/// <summary>
/// Initializes a C# token, forwarding the lexer, positions, text and optional value to the
/// base token and recording the C#-specific token type.
/// </summary>
/// <param name="lexer">The lexer that produced this token.</param>
/// <param name="type">The C# token type classification.</param>
/// <param name="startPos">Position in source where the token starts.</param>
/// <param name="endPos">Position in source where the token ends.</param>
/// <param name="text">The raw token text.</param>
/// <param name="value">Optional parsed token value (e.g. for literals).</param>
public CSToken(CSLexer lexer, CSTokenType type, SourcePosition startPos, SourcePosition endPos, string text, object value = null)
    : base(lexer, startPos, endPos, text, value)
    => Type = type;