// Verifies that 'true'/'false' embedded inside longer identifiers
// ('trues', 'falses') are lexed as identifiers, not boolean literals.
// (The original comment here was copy-pasted from the 'do'/'doc' test.)
public void BoolLit_Embedded_In_Identifier() {
    var tokens = new JsLexer().GetTokens("var trues = [], falses = [];").ToArray();
    Assert.That(tokens.Length, Is.EqualTo(11));
}
// A keyword ('do') embedded inside a longer identifier ('doc') must be
// lexed as one identifier token, not keyword + remainder.
public void Keywords_Embedded_In_Identifier() {
    var lexed = new JsLexer()
        .GetTokens("var a = doc;")
        .ToArray();

    Assert.That(lexed.Length, Is.EqualTo(5));
}
// A regex literal with flags ('/2222/iga') should come back as a single
// RegexLiteral token at position 2 of "x = /2222/iga".
public void RegexLiteral_Parsing() {
    var result = new JsLexer()
        .GetTokens("x = /2222/iga")
        .ToArray();

    Assert.That(result.Length, Is.EqualTo(3));
    Assert.That(result[2].Type, Is.EqualTo(JsTokenType.RegexLiteral));
}
// Lexes 'script' and asserts that the result matches 'knownTokens'
// one-for-one: same count, same text, and every token of type 'type'.
void TestScriptAgainstKnownTokens(string script, JsTokenType type, params string[] knownTokens) {
    var actual = new JsLexer().GetTokens(script).ToArray();

    // counts must agree before comparing element-wise
    Assert.That(actual.Length, Is.EqualTo(knownTokens.Length));

    for (int index = 0; index < knownTokens.Length; index++) {
        Assert.That(actual[index].Text, Is.EqualTo(knownTokens[index]));
        // token text included as the failure message for easier diagnosis
        Assert.That(actual[index].Type, Is.EqualTo(type), actual[index].Text);
    }
}
// '/' and '/=' require special handling in the lexer: it must decide
// between a division operator and the start of a regex literal.
public void Div_Operator_Parsing() {
    string script = @"2 / 4, a /= 3";

    var lexed = new JsLexer().GetTokens(script).ToArray();

    Assert.That(lexed.Length, Is.EqualTo(7));

    // '2 / 4' -> plain division operator at index 1
    Assert.That(lexed[1].Text, Is.EqualTo("/"));
    Assert.That(lexed[1].Type, Is.EqualTo(JsTokenType.Operator));

    // 'a /= 3' -> compound assign operator at index 5
    Assert.That(lexed[5].Text, Is.EqualTo("/="));
    Assert.That(lexed[5].Type, Is.EqualTo(JsTokenType.Operator));
}
// Runs 'text' through the JS lexer, rewrites string literals via the
// change-literal visitor, and returns the edited text rebuilt from the
// resulting token stream.
public string EditText(string text) {
    var input = new AntlrInputStream(text);
    ITokenSource source = new JsLexer(input);

    // The visitor is built against the original lexer's token factory
    // before the source is wrapped by the editing layer.
    IVisitorTree rewriteVisitor = new JsVisitorChangeLiteralString(factoryNames, source.TokenFactory);
    IChangeTokenSource editor = new BaseJsEditTokens(rewriteVisitor);
    source = editor.Edit(source);

    var stream = new CommonTokenStream(source);
    stream.Fill();
    return stream.GetText();
}
// Lexes the body of 'node' (the span between its header end and footer
// begin) into a token list.
List<JsToken> BuildTokenList(ContainerNode node) {
    IEnumerable<JsToken> tokens = new JsLexer().GetTokens(
        _source,
        node.Header.End,    // begin
        node.Footer.Begin   // end
    );

    // A plain ToList() would throw away everything on an invalid token.
    // Materialize by hand instead so we keep whatever was lexed before
    // the failure point.
    var result = new List<JsToken>();
    try {
        foreach (var token in tokens) {
            result.Add(token);
        }
    }
    catch {
        // deliberate best-effort: invalid javascript must not kill the
        // whole document, so lexer errors are swallowed here
    }
    return result;
}
// '!!y' must lex as two separate '!' operator tokens, giving six tokens
// total for "x = !!y;".
public void DoubleBang() {
    var lexed = new JsLexer()
        .GetTokens("x = !!y;")
        .ToArray();

    Assert.That(lexed.Length, Is.EqualTo(6));
}
// Block comments, line comments, and whitespace are all skipped by the
// lexer, so this input yields no tokens at all.
public void WhiteSpace_And_Comments() {
    var lexed = new JsLexer()
        .GetTokens("/*bob*/ // fffff 1 3 5 ")
        .ToArray();

    Assert.That(lexed.Length, Is.EqualTo(0));
}