/// <summary>
/// Captures the metadata for one scanned token: its source span, editor
/// category, trigger flags, and the underlying parser token.
/// </summary>
internal TokenInfo(SourceSpan span, TokenCategory category, TokenTriggers trigger, Token token)
{
    _span = span;
    _token = token;
    _category = category;
    _trigger = trigger;
}
/// <summary>
/// Creates a brace key term. Its editor info always requests brace matching,
/// combined with any extra <paramref name="trigger"/> flags supplied.
/// </summary>
public KeyTerm Brace(string name, TokenTriggers trigger = TokenTriggers.None)
{
    var term = new KeyTerm(name, name);
    term.EditorInfo = new TokenEditorInfo(
        TokenType.Delimiter,
        TokenColor.Text,
        trigger | TokenTriggers.MatchBraces);
    return term;
}
/// <summary>
/// Derives default editor information (token type, color, triggers) from this
/// terminal's option flags, unless editor info was already set explicitly.
/// </summary>
public override void Init(GrammarData grammarData)
{
    base.Init(grammarData);

    // An explicitly assigned EditorInfo always wins.
    if (this.EditorInfo != null)
    {
        return;
    }

    // Token type: operators and delimiters/punctuation get extra bits.
    TokenType tknType = TokenType.Identifier;
    if (IsSet(TermOptions.IsOperator))
    {
        tknType |= TokenType.Operator;
    }
    else if (IsSet(TermOptions.IsDelimiter | TermOptions.IsPunctuation))
    {
        tknType |= TokenType.Delimiter;
    }

    // Triggers: braces participate in brace matching, member-select terms
    // trigger completion.
    TokenTriggers triggers = TokenTriggers.None;
    if (this.IsSet(TermOptions.IsBrace))
    {
        triggers |= TokenTriggers.MatchBraces;
    }
    if (this.IsSet(TermOptions.IsMemberSelect))
    {
        triggers |= TokenTriggers.MemberSelect;
    }

    TokenColor color = IsSet(TermOptions.IsKeyword) ? TokenColor.Keyword : TokenColor.Text;
    this.EditorInfo = new TokenEditorInfo(tknType, color, triggers);
}
/// <summary>
/// Describes one scanner-table entry: the context transition, the regex
/// pattern that matches the token, and the editor attributes to report.
/// </summary>
public TokenTableEntry(ScanContext inputContext, ScanContext outputContext, string pattern, TokenColor color, TokenTriggers triggers)
{
    this.inputContext = inputContext;
    this.outputContext = outputContext;
    // NOTE(review): a "\G" prefix ("anchor to the current position") was
    // deliberately left commented out in the original; the pattern is
    // compiled exactly as supplied.
    this.regExpression = new Regex(/*"\\G" + */ pattern);
    this.tokenColor = color;
    this.tokenTriggers = triggers;
}
/// <summary>
/// Post-processes an editor command after the base language service has run:
/// works around caret-movement commands not reaching the Source, and keeps the
/// method (parameter) tip in sync while it is displayed.
/// </summary>
public override void HandlePostExec(ref Guid guidCmdGroup, uint nCmdId, uint nCmdexecopt, IntPtr pvaIn, IntPtr pvaOut, bool bufferWasChanged)
{
    VsCommands2K cmd = (VsCommands2K)nCmdId;
    // Special handling of "Toggle all outlining" command
    //CodingUnit: 2010.02.19 normal action back in Toggle All Outlining
    /*if (guidCmdGroup == typeof(VsCommands2K).GUID)
     * {
     *   if ((VsCommands2K)nCmdId == VsCommands2K.OUTLN_TOGGLE_ALL)
     *   {
     *     Source.CollapseAllRegions();
     *     return;
     *   }
     * }*/
    base.HandlePostExec(ref guidCmdGroup, nCmdId, nCmdexecopt, pvaIn, pvaOut, bufferWasChanged);
    if (guidCmdGroup == VSConstants.VSStd2K)
    {
        // workaround: for some reason, UP and DOWN commands are not passed to Source in base.HandlePostExec
        if (cmd == VsCommands2K.UP || cmd == VsCommands2K.DOWN)
        {
            Source.OnCommand(TextView, cmd, '\0');
        }
        // _startLine/_startPos presumably record the caret before the command ran
        // (set elsewhere, e.g. in HandlePreExec) — TODO confirm against the rest of the class.
        if (_startLine >= 0 && Source.MethodData.IsDisplayed)
        {
            int line;
            int pos;
            TextView.GetCaretPos(out line, out pos);
            // Only react when the command actually moved the caret.
            if (line != _startLine || pos != _startPos)
            {
                // Backward-moving edits/navigation should re-evaluate the whole tip
                // rather than advance the highlighted parameter.
                bool backward = cmd == VsCommands2K.BACKSPACE || cmd == VsCommands2K.BACKTAB || cmd == VsCommands2K.LEFT || cmd == VsCommands2K.LEFT_EXT;
                TokenInfo info = Source.GetTokenInfo(line, pos);
                TokenTriggers triggerClass = info.Trigger;
                // Mask down to the MethodTip bit group and check that ParameterNext is
                // the ONLY method-tip bit set (standard MPF idiom): in that case just
                // advance the current parameter; otherwise recompute the method tip.
                if (!backward && (triggerClass & TokenTriggers.MethodTip) == TokenTriggers.ParameterNext)
                {
                    Source.MethodData.AdjustCurrentParameter(1);
                }
                else
                {
                    Source.MethodTip(TextView, line, pos, info);
                }
            }
        }
    }
}
/// <summary>
/// Returns a list of declarations based on the specified reason for parsing.
/// </summary>
/// <param name="view">[in] An <see cref="T:Microsoft.VisualStudio.TextManager.Interop.IVsTextView"></see> object that can be used to access the source.</param>
/// <param name="line">[in] The line number where the parse operation started.</param>
/// <param name="col">[in] The offset into the line where the parse operation started.</param>
/// <param name="info">[in] A <see cref="T:Microsoft.VisualStudio.Package.TokenInfo"></see> structure containing information about the token at the specified position.</param>
/// <param name="reason">[in] The <see cref="T:Microsoft.VisualStudio.Package.ParseReason"></see> value describing what kind of parse operation was completed.</param>
/// <returns>
/// If successful returns a <see cref="T:Microsoft.VisualStudio.Package.Declarations"></see> object; otherwise, returns a null value.
/// </returns>
public override Declarations GetDeclarations(IVsTextView view, int line, int col, TokenInfo info, ParseReason reason)
{
    // After an open parenthesis we offer reference modes; everywhere else,
    // object types.
    bool afterOpenParen = (info.Trigger & OpenParenthesisTokenTrigger) != 0;
    if (afterOpenParen)
    {
        return new FactEditorReferenceModeDeclarations(m_LanguageService);
    }
    return new FactEditorObjectTypeDeclarations(m_LanguageService);
}
/// <summary>
/// Pulls the next token from the tokenizer and packages its span, category,
/// and trigger flags into a <c>TokenInfo</c> for the editor.
/// </summary>
public override TokenInfo ReadToken()
{
    Token next = tokenizer.GetNext();
    var startLocation = new SourceLocation(next.StartPosition, next.StartLine, next.StartColumn);
    // Span runs from the token's start to the tokenizer's current position.
    SS.SourceSpan tokenSpan = new SS.SourceSpan(
        ConvertToSSSrcLocation(startLocation),
        ConvertToSSSrcLocation(tokenizer.Position));
    TokenTriggers triggerFlags = GetTrigger(next.Kind);
    TokenCategory tokenCategory = GetCategory(next.Kind);
    return new TokenInfo(tokenSpan, tokenCategory, triggerFlags);
}
/// <summary>
/// Initializes this key term: interns its symbol, assigns its scanner
/// priority, and derives default editor info from its flags.
/// </summary>
public override void Init(GrammarData grammarData)
{
    base.Init(grammarData);
    this.Symbol = SymbolTable.Symbols.TextToSymbol(Text);

    // Priority notes (condensed from the original region):
    //  - Priority determines the order in which terminals try to match the
    //    current input char; higher priority is tried earlier.
    //  - Grammar keywords default to the lowest priority so other terminals
    //    (like identifiers) get to check the input first.
    //  - Longer symbols get higher priority ("+=" is tried before "+"),
    //    hence the "+ Text.Length" term.
    //  - Reserved words are the opposite: they get the highest priority.
    base.Priority = FlagIsSet(TermFlags.IsReservedWord)
        ? ReservedWordsPriority + Text.Length
        : LowestPriority + Text.Length;

    // Setup editor info — an explicitly assigned EditorInfo always wins.
    if (this.EditorInfo != null)
    {
        return;
    }

    TokenType tknType = TokenType.Identifier;
    if (FlagIsSet(TermFlags.IsOperator))
    {
        tknType |= TokenType.Operator;
    }
    else if (FlagIsSet(TermFlags.IsDelimiter | TermFlags.IsPunctuation))
    {
        tknType |= TokenType.Delimiter;
    }

    TokenTriggers triggers = TokenTriggers.None;
    if (this.FlagIsSet(TermFlags.IsBrace))
    {
        triggers |= TokenTriggers.MatchBraces;
    }
    if (this.FlagIsSet(TermFlags.IsMemberSelect))
    {
        triggers |= TokenTriggers.MemberSelect;
    }

    TokenColor color = FlagIsSet(TermFlags.IsKeyword) ? TokenColor.Keyword : TokenColor.Text;
    this.EditorInfo = new TokenEditorInfo(tknType, color, triggers);
}
/// <summary>
/// Intercepts key presses: the token-delimiter character, or any key that maps
/// to a configured <c>TokenTriggerKey</c>, raises <c>OnTokenTriggered</c> and
/// marks the event handled.
/// </summary>
/// <param name="e">The key event being previewed.</param>
protected override void OnPreviewKeyDown(KeyEventArgs e)
{
    base.OnPreviewKeyDown(e);

    // The delimiter character always triggers, regardless of configured keys.
    if (e.Key.ToChar() == TokenDelimiter)
    {
        OnTokenTriggered();
        e.Handled = true;
        return;
    }

    // Otherwise, trigger only when the key name parses to a configured
    // trigger-key flag.
    var keyName = e.Key.ToString();
    var parsedKey = default(TokenTriggerKey);
    if (keyName.TryParseEnum(out parsedKey) && TokenTriggers.Has(parsedKey))
    {
        OnTokenTriggered();
        e.Handled = true;
    }
}
/// <summary>
/// Bundles the editor attributes (type, color, triggers) for one token kind.
/// </summary>
public TokenDefinition(TokenType type, TokenColor color, TokenTriggers triggers)
{
    this.TokenTriggers = triggers;
    this.TokenColor = color;
    this.TokenType = type;
}
/// <summary>
/// Registers (or replaces) the editor attributes for a token identified by name.
/// </summary>
public static void ColorToken(string tokenName, TokenType type, TokenColor color, TokenTriggers trigger)
{
    var definition = new TokenDefinition(type, color, trigger);
    definitions[tokenName] = definition;
}
/// <summary>
/// Registers (or replaces) the editor attributes for a token identified by id.
/// </summary>
public static void ColorToken(int token, TokenType type, TokenColor color, TokenTriggers trigger)
{
    var definition = new TokenDefinition(type, color, trigger);
    definitions[token] = definition;
}
/// <summary>
/// Captures the span, category, and trigger flags for one scanned token.
/// </summary>
internal TokenInfo(SourceSpan span, TokenCategory category, TokenTriggers trigger)
{
    SourceSpan = span;
    Category = category;
    Trigger = trigger;
}
/// <summary>
/// Bundles the editor presentation (type, color, triggers) for a terminal.
/// </summary>
public TokenEditorInfo(TokenType type, TokenColor color, TokenTriggers triggers)
{
    Triggers = triggers;
    Color = color;
    Type = type;
}
/// <summary>
/// Registers editor attributes for a token id. An existing registration for
/// the same id is kept unchanged (first writer wins).
/// </summary>
public void AddTokenDefinition(int token, TokenType type, PuppetTokenColor color, TokenTriggers trigger)
{
    if (definitions.ContainsKey(token))
    {
        return; // already registered — do not overwrite
    }
    definitions.Add(token, new TokenDefinition(type, color, trigger));
}
/// <summary>
/// Captures the span, category, and trigger flags for one scanned token.
/// </summary>
public TokenInfo(SourceSpan span, TokenCategory category, TokenTriggers trigger)
{
    _span = span;
    _category = category;
    _trigger = trigger;
}