// Verifies that parsing a source containing a single '\n' inserts a NewLineToken
// at line 1, column 7, index 6 (the position immediately after "source").
public void ParseSourceWithNewLineShouldIncludeNewLineToken()
{
    String source = "source\ncode";

    IList<TokenBase> input = new List<TokenBase>();
    input.Add(TokenCreator.Create<GenericToken>("source", null));
    // Use NewLine(1) for consistency with the two- and five-newline tests,
    // which call TokenCreator.NewLine(n) for n line breaks (was Advance(1)).
    TokenCreator.NewLine(1);
    input.Add(TokenCreator.Create<GenericToken>("code", null));

    NewLineToken newLine = new NewLineToken(1, 7, 6);

    IEnumerable<TokenBase> parsed = _parser.Parse(source, input);

    Assert.AreEqual(newLine, parsed.ElementAt(1));
}
// Parsing a source with two consecutive newlines must yield two NewLineTokens:
// the first at (line 1, col 7, index 6), the second at (line 2, col 1, index 7).
public void ParseSourceWithTwoNewLineShouldIncludeTwoNewLineTokens()
{
    String source = "source\n\ncode";

    IList<TokenBase> input = new List<TokenBase>
    {
        TokenCreator.Create<GenericToken>("source", null)
    };
    TokenCreator.NewLine(2);
    input.Add(TokenCreator.Create<GenericToken>("code", null));

    NewLineToken expectedFirst = new NewLineToken(1, 7, 6);
    NewLineToken expectedSecond = new NewLineToken(2, 1, 7);

    IEnumerable<TokenBase> parsed = _parser.Parse(source, input);

    Assert.AreEqual(expectedFirst, parsed.ElementAt(1));
    Assert.AreEqual(expectedSecond, parsed.ElementAt(2));
}
// Parsing a source with five consecutive newlines must yield five NewLineTokens
// at parsed positions 1..5, with line/column/index advancing one per '\n'.
public void ParseSourceWithFiveNewLineShouldIncludeFiveNewLineTokens()
{
    String source = "source\n\n\n\n\ncode";

    IList<TokenBase> input = new List<TokenBase>();
    input.Add(TokenCreator.Create<GenericToken>("source", null));
    TokenCreator.NewLine(5);
    input.Add(TokenCreator.Create<GenericToken>("code", null));

    // First break sits right after "source"; each subsequent break starts a
    // new line at column 1 while the absolute index advances by one.
    NewLineToken[] expected =
    {
        new NewLineToken(1, 7, 6),
        new NewLineToken(2, 1, 7),
        new NewLineToken(3, 1, 8),
        new NewLineToken(4, 1, 9),
        new NewLineToken(5, 1, 10),
    };

    IEnumerable<TokenBase> parsed = _parser.Parse(source, input);

    for (int i = 0; i < expected.Length; i++)
    {
        Assert.AreEqual(expected[i], parsed.ElementAt(i + 1));
    }
}
/// <summary>
/// Visits a <see cref="NewLineToken"/>. The base implementation is a no-op;
/// derived visitors override this to react to newline tokens.
/// </summary>
/// <param name="newLineToken">The newline token being visited.</param>
public virtual void Visit(NewLineToken newLineToken) { }
/// <summary>
/// Produces the next token from the input, or null when the input is
/// exhausted before a complete token is recognised.
/// </summary>
/// <returns>The next <see cref="Token"/>, or null at end of input.</returns>
public Token GetToken()
{
    State = LexicAnalyserState.Initial;
    Value = HasNext ? Character.ToString() : "";

    Token token = null;
    while (HasNext && token == null)
    {
        if (State == LexicAnalyserState.Initial)
        {
            // Initial state consumes/classifies characters without emitting.
            HandleInitial();
        }
        else
        {
            // Every non-initial state yields exactly one token and then
            // advances past the current character; the increment is hoisted
            // here instead of being duplicated in each switch arm.
            token = CreateToken();
            CurrentIndex++;
        }
    }

    return token;
}

// Maps the current (non-initial) analyser state to its token instance.
// Value-carrying states (non-terminal, terminal, newline, identifier, number)
// use the accumulated Value; keyword/punctuation states take no arguments.
private Token CreateToken()
{
    switch (State)
    {
        case LexicAnalyserState.NonTerminal: return new NonTerminalToken(Value);
        case LexicAnalyserState.Space: return new SpaceToken();
        case LexicAnalyserState.Terminal: return new TerminalToken(Value);
        case LexicAnalyserState.Empty: return new EmptyToken();
        case LexicAnalyserState.NewLine: return new NewLineToken(Value);
        case LexicAnalyserState.Identifier: return new IdentifierToken(Value);
        case LexicAnalyserState.Number: return new NumberToken(Value);
        case LexicAnalyserState.Var: return new VarToken();
        case LexicAnalyserState.Write: return new WriteToken();
        case LexicAnalyserState.Read: return new ReadToken();
        case LexicAnalyserState.If: return new IfToken();
        case LexicAnalyserState.End: return new EndToken();
        case LexicAnalyserState.Then: return new ThenToken();
        case LexicAnalyserState.Begin: return new BeginToken();
        case LexicAnalyserState.While: return new WhileToken();
        case LexicAnalyserState.Do: return new DoToken();
        case LexicAnalyserState.SemiColon: return new SemiColonToken();
        case LexicAnalyserState.OpenParentheses: return new OpenParenthesesToken();
        case LexicAnalyserState.CloseParentheses: return new CloseParenthesesToken();
        case LexicAnalyserState.Plus: return new PlusToken();
        case LexicAnalyserState.Sub: return new SubToken();
        case LexicAnalyserState.Great: return new GreatToken();
        case LexicAnalyserState.Less: return new LessToken();
        case LexicAnalyserState.Equal: return new EqualToken();
        case LexicAnalyserState.Repeat: return new RepeatToken();
        case LexicAnalyserState.Until: return new UntilToken();
        case LexicAnalyserState.Attribution: return new AttributionToken();
        case LexicAnalyserState.NotEqual: return new NotEqualToken();
        case LexicAnalyserState.GreatOrEqual: return new GreatOrEqualToken();
        case LexicAnalyserState.LessOrEqual: return new LessOrEqualToken();
        default:
            // Unreachable for known states; surfaces programming errors loudly.
            throw new ArgumentOutOfRangeException(nameof(State), State, "Unhandled lexer state");
    }
}
// Compiles a token-text parse tree into a list of TokenText lines.
// Returns false (after logging the cause) on any compilation error;
// 'lines' holds whatever lines were completed before the failure.
private bool TryCompileTokenText(MessageScriptParser.TokenTextContext context, out List <TokenText> lines)
{
    LogContextInfo(context);
    lines = new List <TokenText>();

    // Builder for the line currently being assembled; null until the first
    // token of a line is appended, and reset to null when an "e" tag ends a line.
    TokenTextBuilder lineBuilder = null;

    foreach (var node in context.children)
    {
        IToken lineToken;

        if (TryCast <MessageScriptParser.TokenContext>(node, out var tagContext))
        {
            // Tag node: dispatch on its (case-insensitive) identifier.
            if (!TryGetFatal(context, () => tagContext.Identifier(), "Expected tag id", out var tagIdNode))
            {
                return(false);
            }

            var tagId = tagIdNode.Symbol.Text;
            switch (tagId.ToLowerInvariant())
            {
                case "f": // function tag
                {
                    if (!TryCompileFunctionToken(tagContext, out var functionToken))
                    {
                        mLogger.Error("Failed to compile function token");
                        return(false);
                    }
                    lineToken = functionToken;
                }
                break;

                case "n": // explicit newline token within the current line
                    lineToken = new NewLineToken();
                    break;

                case "e": // end-of-line: flush the current builder (or emit an empty line)
                {
                    if (lineBuilder == null)
                    {
                        LogWarning(context, "Empty line");
                        lines.Add(new TokenText());
                    }
                    else
                    {
                        lines.Add(lineBuilder.Build());
                        lineBuilder = null;
                    }
                    continue; // nothing to append; move on to the next node
                }

                case "x": // code point tag
                {
                    if (!TryCompileCodePointToken(tagContext, out var codePointToken))
                    {
                        mLogger.Error("Failed to compile code point token");
                        return(false);
                    }
                    lineToken = codePointToken;
                }
                break;

                default: // unknown id: try resolving it as a library-aliased function
                {
                    lineToken = null;
                    var wasAliasedFunction = false;
                    if (Library != null)
                    {
                        wasAliasedFunction = TryCompileAliasedFunction(tagContext, tagId, out var functionToken);
                        lineToken = functionToken;
                    }

                    if (!wasAliasedFunction)
                    {
                        LogError(tagContext, $"Unknown tag with id {tagId}");
                        return(false);
                    }
                    break;
                }
            }
        }
        else if (TryCast <ITerminalNode>(node, out var textNode))
        {
            // Plain text node: strip embedded line breaks and skip nodes that
            // consisted of nothing but newlines.
            var text = textNode.Symbol.Text;
            var textWithoutNewlines = Regex.Replace(text, @"\r?\n", "");
            if (textWithoutNewlines.Length == 0)
            {
                continue; // filter out standalone newlines
            }
            lineToken = new StringToken(textWithoutNewlines);
        }
        else
        {
            // Neither tag nor text — log with the most specific context available.
            if (node is ParserRuleContext)
            {
                LogError(node as ParserRuleContext, "Expected a tag or text, but got neither.");
            }
            else
            {
                LogError(context, "Expected a tag or text, but got neither.");
            }
            return(false);
        }

        // Lazily open a new line when its first token arrives.
        if (lineBuilder == null)
        {
            lineBuilder = new TokenTextBuilder();
        }

        Debug.Assert(lineToken != null, "Line token shouldn't be null");
        lineBuilder.AddToken(lineToken);
    }

    // A trailing line without an explicit "e" tag is still emitted.
    if (lineBuilder != null)
    {
        lines.Add(lineBuilder.Build());
    }

    return(true);
}
/// <summary>
/// Decompiles a <see cref="NewLineToken"/> by writing its "n" tag.
/// The token itself carries no payload, so only the tag is emitted.
/// </summary>
/// <param name="token">The newline token being decompiled (unused).</param>
public void Decompile(NewLineToken token) => WriteTag("n");
/// <summary>
/// Builds a <see cref="NewLineToken"/> from its XML node. The node is expected
/// to have no children; when <see cref="VerifyNodeHasNoChildren"/> rejects it,
/// null is returned instead.
/// </summary>
/// <param name="newLineNode">XML node representing the newline.</param>
/// <param name="index">Position of the node, used for the stack frame context.</param>
private NewLineToken LoadNewLine(XmlNode newLineNode, int index)
{
    using (this.StackFrame(newLineNode, index))
    {
        return VerifyNodeHasNoChildren(newLineNode) ? new NewLineToken() : null;
    }
}