/// <summary>
/// Lexes the whole <paramref name="memorySource"/> by repeatedly running the lexer FSM,
/// transcoding each match into a <see cref="Token{IN}"/> (with an optional per-token callback),
/// and finally appending a synthetic end-of-stream token.
/// </summary>
/// <param name="memorySource">The full character source to tokenize.</param>
/// <returns>
/// A successful <see cref="LexerResult{IN}"/> holding the token list, or an error result carrying
/// a <see cref="LexicalError"/> for the first character the FSM could not match.
/// </returns>
public LexerResult<IN> Tokenize(ReadOnlyMemory<char> memorySource)
{
    var tokens = new List<Token<IN>>();

    // First run starts explicitly at offset 0.
    var match = LexerFsm.Run(memorySource, 0);
    if (!match.IsSuccess && !match.IsEOS)
    {
        var failed = match.Result;
        return new LexerResult<IN>(new LexicalError(failed.Position.Line, failed.Position.Column, failed.CharValue));
    }

    while (match.IsSuccess)
    {
        var token = Transcode(match);
        // A registered callback may rewrite the token (e.g. change its id or value).
        if (CallBacks.TryGetValue(token.TokenID, out var callback))
        {
            token = callback(token);
        }
        tokens.Add(token);

        // NOTE(review): the parameterless Run overload presumably resumes from the FSM's
        // internal current position — confirm against LexerFsm.
        match = LexerFsm.Run(memorySource);
        if (!match.IsSuccess && !match.IsEOS)
        {
            var failed = match.Result;
            return new LexerResult<IN>(new LexicalError(failed.Position.Line, failed.Position.Column, failed.CharValue));
        }

        // NOTE(review): a comment match is consumed here, but since it is still IsSuccess it is
        // also transcoded and added on the next loop iteration — confirm this is intended.
        if (match.IsSuccess && match.Result.IsComment)
        {
            ConsumeComment(match.Result, memorySource);
        }
    }

    // Synthesize the end-of-stream token just past the last real token
    // (or at line 1 / column 0 / index 0 when the source produced no tokens).
    var eos = new Token<IN>();
    var last = tokens.LastOrDefault();
    eos.Position = last is null
        ? new TokenPosition(1, 0, 0)
        : new TokenPosition(last.Position.Index + 1, last.Position.Line, last.Position.Column + last.Value.Length);
    tokens.Add(eos);

    return new LexerResult<IN>(tokens);
}
/// <summary>
/// Wraps a <see cref="LexicalError"/> in an exception so lexing failures can be thrown.
/// </summary>
/// <param name="error">The lexical error that caused this exception; stored in <c>Error</c>.</param>
public LexerException(LexicalError error) => Error = error;
/// <summary>
/// Builds a failed lexer result: flags <c>IsError</c> and records the offending error.
/// </summary>
/// <param name="error">The lexical error describing the failure; stored in <c>Error</c>.</param>
public LexerResult(LexicalError error) => (IsError, Error) = (true, error);