/// <summary>A hexadecimal literal ("0xff") must be classified as an integer token.</summary>
public void TestTokenize0xff()
{
	var tokenizer = new CTokenizer("0xff");

	var tokens = tokenizer.Tokenize().ToArray();

	Assert.AreEqual(CTokenType.Integer, tokens[0].Type);
}
/// <summary>
/// Strips C-style comments from <paramref name="Input"/>, replacing line comments
/// ("// ...") and block comments ("/* ... */") with spaces so that the positions
/// (row/column) of the remaining tokens are unchanged. Newlines are preserved.
/// </summary>
/// <param name="Input">Source text that may contain comments.</param>
/// <returns>The input text with comment bodies blanked out by spaces.</returns>
public static string RemoveComments(string Input)
{
	// Renamed local: the original shadowed the CTokenizer type name.
	var Tokenizer = new CTokenizer(Input, TokenizeSpaces: true);
	var Tokens = Tokenizer.Tokenize().GetEnumerator();
	// StringBuilder avoids the accidental O(n^2) of repeated string concatenation.
	var Output = new System.Text.StringBuilder();
	while (Tokens.MoveNext())
	{
		switch (Tokens.Current.Raw)
		{
			case "//":
				// Blank the rest of the line but keep the newline so row counts stay valid.
				Output.Append(new String(' ', Tokenizer.SkipUntilSequence("\n") - 1)).Append('\n');
				break;
			case "/*":
				// Inside a block comment only non-space characters become spaces,
				// so the line structure (and thus positions) is preserved.
				Output.Append(ReplaceNonSpaceWithSpaces(Tokenizer.ReadUntilSequence("*/")));
				break;
			default:
				Output.Append(Tokens.Current.Raw);
				break;
		}
	}
	return Output.ToString();
}
/// <summary>Leading newlines must advance the row reported in the first token's position.</summary>
public void TestTokenizeLineFeeds()
{
	var tokenizer = new CTokenizer("\n\na");

	var tokens = tokenizer.Tokenize().ToArray();

	Assert.AreEqual(2, tokens[0].Position.Row);
}
/// <summary>
/// Smoke test: pulling the first token out of a block comment (with space
/// tokenization enabled) must not throw. No value assertions are made.
/// </summary>
public void TestTokenize2()
{
	var tokenizer = new CTokenizer("/* comment's */", TokenizeSpaces: true);
	var enumerator = tokenizer.Tokenize().GetEnumerator();
	enumerator.MoveNext();
}
/// <summary>
/// Exploratory test for adjacent string literals ("a" "b"): prints the raw
/// tokens for inspection. NOTE(review): has no assertions yet — the intended
/// check (token type of the concatenated string) is still commented out.
/// </summary>
public void TestTokenizeStringCat()
{
	var tokenizer = new CTokenizer(@" ""a"" ""b"" ");
	var tokens = tokenizer.Tokenize().ToArray();

	var rawValues = tokens.Select(token => token.Raw);
	Console.WriteLine(String.Join("\n", rawValues));
	//Assert.AreEqual(CTokenType.Integer, Tokens[0].Type);
}
/// <summary>
/// Verifies the full position bookkeeping (absolute offset, row, column, and
/// column ignoring leading spaces) across a newline and indented "#include".
/// </summary>
public void TestTokenize4()
{
	var tokenizer = new CTokenizer("test\n #include", TokenizeSpaces: false);

	var tokens = tokenizer.Tokenize().ToArray();

	// "test" starts the input; "#" sits after the newline plus one space; "include" follows it.
	Assert.AreEqual("Position:0, Row:0, Column:0, ColumnNoSpaces:0", tokens[0].Position.ToString());
	Assert.AreEqual("Position:7, Row:1, Column:2, ColumnNoSpaces:0", tokens[1].Position.ToString());
	Assert.AreEqual("Position:8, Row:1, Column:3, ColumnNoSpaces:1", tokens[2].Position.ToString());
}
/// <summary>Floating-point literals ("1.0", ".0f") must each come out as a single token.</summary>
public void TestTokenizeDouble()
{
	var tokenizer = new CTokenizer("1.0, .0f");

	var rawTokens = tokenizer.Tokenize().Select(token => token.Raw).ToArray();

	// Trailing "" is the end-of-input token the tokenizer always emits.
	string[] expected = { "1.0", ",", ".0f", "" };
	CollectionAssert.AreEqual(expected, rawTokens);
}
/// <summary>Decimal and hexadecimal integers separated by commas tokenize individually.</summary>
public void TestTokenize3()
{
	var tokenizer = new CTokenizer("1, 2, 0x100");

	var rawTokens = tokenizer.Tokenize().Select(token => token.Raw).ToArray();

	// Trailing "" is the end-of-input token the tokenizer always emits.
	string[] expected = { "1", ",", "2", ",", "0x100", "" };
	CollectionAssert.AreEqual(expected, rawTokens);
}
/// <summary>
/// Mixed input (char literals, the "&amp;&amp;" operator, identifiers, and
/// arithmetic) must split into the expected raw token sequence.
/// </summary>
public void TestTokenize()
{
	var tokenizer = new CTokenizer(" 'a' && 'b' test + 2 * test3");

	var rawTokens = tokenizer.Tokenize().Select(token => token.Raw).ToArray();

	// Trailing "" is the end-of-input token the tokenizer always emits.
	string[] expected = { "'a'", "&&", "'b'", "test", "+", "2", "*", "test3", "" };
	CollectionAssert.AreEqual(expected, rawTokens);
}
/// <summary>
/// An octal escape inside a string literal ("\03a") must decode to the
/// character with value 3 followed by a literal 'a'.
/// </summary>
public void TestTokenizeStringFormat1()
{
	var tokenizer = new CTokenizer(@" ""\03a"" ");
	var tokens = tokenizer.Tokenize().ToArray();
	Console.WriteLine(tokens[0].Raw);

	var decoded = tokens[0].GetStringValue();

	Assert.AreEqual(3, decoded[0]);
	Assert.AreEqual('a', decoded[1]);
}
/// <summary>
/// Repeatedly calls <c>tokenizer.Read()</c> over the whole input and collects
/// every produced token into <c>_tokens</c>. When the tokenizer reports
/// <see cref="OperationStatus.NeedMoreData"/> (end of input), the final partial
/// token is kept only if its type is not <c>TokenType.Unknown</c>.
/// </summary>
/// <param name="theString">Input text to tokenize.</param>
public TokenizerTestHelper(string theString)
{
	var sequence = new ReadOnlySequence<char>(theString.AsMemory());
	var tokenizer = new CTokenizer(sequence);

	// Same read/check/add order as a while(true) loop: read, test for end,
	// otherwise record the token and read again.
	for (OperationStatus status = tokenizer.Read(); ; status = tokenizer.Read())
	{
		if (status == OperationStatus.NeedMoreData)
		{
			// End of data: keep a trailing token only if one was actually recognized.
			if (tokenizer.CurrentToken.TokenType != TokenType.Unknown)
			{
				_tokens.Add(tokenizer.CurrentToken);
			}
			break;
		}
		_tokens.Add(tokenizer.CurrentToken);
	}
}
/// <summary>
/// Sets up a preprocessor pass over one source text: normalizes the file name
/// and line endings, strips comments, creates the tokenizer/token reader, and
/// advances onto the first token.
/// </summary>
/// <param name="FileName">Path of the file being preprocessed; normalized to a full path when possible.</param>
/// <param name="Text">Raw source text of the file.</param>
/// <param name="Context">Shared preprocessor options/state (used here for debug logging).</param>
public CPreprocessorInternal(string FileName, string Text, CPreprocessorContext Context)
{
	// Remove comments.
	// Best-effort path normalization: keep the raw name if GetFullPath throws
	// (invalid characters, too-long path, etc.).
	try { FileName = Path.GetFullPath(FileName); } catch { }
	if (Context.DebugPreprocessor) { Console.WriteLine("CPreprocessorInternal(FileName={0})", FileName); }
	// Normalize CRLF/CR to LF first so comment removal and token positions
	// work on consistent line endings.
	Text = CPreprocessor.RemoveComments(Text.Replace("\r\n", "\n").Replace("\r", "\n"));
	this.Text = Text;
	this.CurrentFileName = FileName;
	// Spaces are tokenized so the preprocessor can reproduce original spacing in its output.
	this.CTokenizer = new CTokenizer(Text, TokenizeSpaces: true);
	this.Context = Context;
	this.Tokens = new CTokenReader(CTokenizer.Tokenize());
	// Position the reader on the first token (skipping/consuming space tokens).
	this.Tokens.MoveNextSpace();
	// NOTE(review): presumably emits the initial line marker for the output —
	// confirm against OutputLine's definition.
	OutputLine();
	//Console.WriteLine(Tokens.GetString());
}