// Runs the lexer forward until its input position reaches 'stopAt', storing each
// non-whitespace token in _tokens (keyed by start index) and any child tokens in
// _nestedTokens. Returns the lexer's final input position.
protected int RunLexerUntil(int stopAt)
{
    if (_lexer.InputPosition < stopAt)
    {
        int startAt = _lexer.InputPosition;
        _tokens.ClearSpace(startAt, stopAt - startAt);
        _nestedTokens.ClearSpace(startAt, stopAt - startAt);
        for (Maybe<Token> t_; _lexer.InputPosition < stopAt && (t_ = _lexer.NextToken()).HasValue;)
        {
            Token t = t_.Value;
            if (t.EndIndex > stopAt)
            {
                // The token extends past the requested range, so clear its whole extent
                // (the initial ClearSpace only covered up to stopAt).
                _tokens.ClearSpace(t.StartIndex, t.Length);
                _nestedTokens.ClearSpace(t.StartIndex, t.Length);
            }
            if (t.Children != null)
            {
                foreach (var ct in t.Children)
                {
                    _nestedTokens[ct.StartIndex] = new EditorToken(ct.TypeInt, ct.Length, ct.Value);
                }
            }
            if (!IsWhitespace(t.TypeInt))
            {
                var et = new EditorToken(t.TypeInt, t.Length, t.Value);
                _tokens[t.StartIndex] = StoreLexerError(et);
            }
        }
        // If an error was reported but never attached to a token, attach it to the last one.
        if (_lexerError != null && _tokens.Count != 0)
        {
            _tokens.Last = StoreLexerError(_tokens.Last);
        }
    }
    return _lexer.InputPosition;
}

// If a lexer error is pending, attaches it to the given token by swapping the error
// object into the token's Value (the original value is preserved inside the error).
EditorToken StoreLexerError(EditorToken token)
{
    if (_lexerError != null)
    {
        _haveLexerErrors = true;
        _lexerError.OriginalValue = token.Value;
        token.Value = _lexerError;
        _lexerError = null;
    }
    return token;
}
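
Both methods above lean on Loyc's SparseAList<T>, where items are keyed by character position, ClearSpace empties a range without shifting later entries, and NextHigherItem/NextLowerItem walk the non-empty slots. A minimal standalone sketch of that behavior, assuming Loyc.Collections and using plain int payloads (the list contents are illustrative, not part of the original code):

using Loyc.Collections;

var list = new SparseAList<int>();
list.ClearSpace(0, 20);        // assumed to grow Count to 20, with every slot empty
list[3] = 33;                  // set payloads at specific positions, the way
list[10] = 1010;               // RunLexerUntil keys tokens by StartIndex

int? i = null;                 // null = start before the first item
int a = list.NextHigherItem(ref i);   // i == 3,  a == 33
int b = list.NextHigherItem(ref i);   // i == 10, b == 1010
list.NextHigherItem(ref i);           // no more items: i becomes null again

list.ClearSpace(8, 5);         // empties positions 8..12, unsetting the item at 10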
Example #3
// Converts a sparse list of EditorTokens into an ordinary token list; each item's
// index in the sparse list becomes the resulting token's start index.
public static DList<Token> ToNormalTokens(SparseAList<EditorToken> eTokens)
{
    var output = new DList<Token>();
    int? index = null;

    for (;;)
    {
        EditorToken eTok = eTokens.NextHigherItem(ref index);
        if (index == null)
        {
            break; // no more items in the sparse list
        }
        output.Add(eTok.ToToken(index.Value));
    }
    return output;
}
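
For context, a hypothetical round trip through ToNormalTokens, assuming the EditorToken constructor used in RunLexerUntil above; the token-type ids (1 and 2) are placeholders:

var eTokens = new SparseAList<EditorToken>();
eTokens.ClearSpace(0, 11);                     // 11 character positions, all empty
eTokens[0] = new EditorToken(1, 5, "hello");   // token covering positions 0..4
eTokens[6] = new EditorToken(2, 5, "world");   // token covering positions 6..10

DList<Token> tokens = ToNormalTokens(eTokens);
// tokens[0].StartIndex == 0 and tokens[1].StartIndex == 6: the sparse index is
// passed to EditorToken.ToToken() and becomes each token's start index.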
// For each requested span, scans _tokens for entries whose Value is a LexerMessage
// and reports each one as an error tag.
void AddLexerErrors(NormalizedSnapshotSpanCollection spans, List<ITagSpan<ErrorTag>> errors)
{
    if (_haveLexerErrors)
    {
        foreach (var span in spans)
        {
            // Start just inside the span and back up to the token that overlaps its start.
            int? i = span.Start.Position + 1;
            EditorToken t = _tokens.NextLowerItem(ref i);
            if (i != null)
            {
                do
                {
                    if (t.Value is LexerMessage)
                    {
                        errors.Add(LexerMessageToErrorTagSpan(span.Snapshot, t.ToToken(i.Value, false), t.Value as LexerMessage));
                    }
                    t = _tokens.NextHigherItem(ref i);
                } while (i < span.End.Position); // i becomes null past the last item
            }
        }
    }
}