/// <summary>
/// Parses <paramref name="rawText"/> as LPC source and overwrites this token's
/// type, lexemes, and children with those of the first parsed token that has at
/// least one lexeme. If parsing yields no such token, this instance is unchanged.
/// </summary>
/// <param name="rawText">Raw LPC source text to parse.</param>
public void SetTextAsThisToken(string rawText)
{
    // Round-trip the text through the project's internal codec so the scanner
    // reads the same byte stream it would see when reading from a file.
    // 'using' fixes the original's leak: the reader (and its MemoryStream)
    // was never disposed.
    using (StreamReader stream = new StreamReader(
        new MemoryStream(Globals.LpcInternalCodec.GetBytes(rawText)),
        Globals.LpcFileCodec, false))
    {
        ParseMap map;
        SyntaxRules lpcRules = new SyntaxRules(out map);
        Scanner.Scanner scanner = new Scanner.Scanner(stream);
        Lexer.Lexer lexer = new Lexer.Lexer(scanner);
        Parser parser = new Parser(lexer, map);

        foreach (Token token in parser.LPCTokens)
        {
            // Skip empty tokens; only the first token carrying lexemes is used.
            if (token.Lexmes.Count == 0)
            {
                continue;
            }

            // The original's 'Token copy = token;' was a reference copy, not a
            // clone, so it added nothing — the source token is used directly.
            Type = token.Type;

            // Clone the lexeme array so this token does not share the list's
            // backing storage with the parsed token.
            object tempLexmes = token.Lexmes.ToArray().Clone();
            this.Lexmes = new List<Stellarmass.LPC.Lexer.Lexme>((Lexer.Lexme[])tempLexmes);

            // Deep-copy children level by level.
            // NOTE(review): indexing Children[outter] right after Add assumes
            // this.Children starts empty — confirm callers never reuse a
            // populated token here (original behavior preserved).
            for (int outter = 0; outter < token.Children.Count; outter++)
            {
                this.Children.Add(new List<Token>());
                foreach (Token t in token.Children[outter])
                {
                    this.Children[outter].Add(new Token(t));
                }
            }
            return;
        }
    }
}
/// <summary>
/// Parses <paramref name="rawText"/> as LPC source and inserts all resulting
/// tokens into this token's <c>Tokens</c> list at <paramref name="tokenIndex"/>.
/// </summary>
/// <param name="rawText">Raw LPC source text to parse.</param>
/// <param name="childIndex">
/// NOTE(review): never read by this method — kept only for signature
/// compatibility with existing callers; confirm whether it was meant to
/// target a child list instead of <c>Tokens</c>.
/// </param>
/// <param name="tokenIndex">Position in <c>Tokens</c> at which to insert.</param>
public void InsertTextAsTokens(string rawText, int childIndex, int tokenIndex)
{
    // Encode through the project's internal codec so the scanner sees
    // file-identical bytes. 'using' fixes the original's undisposed reader.
    using (StreamReader stream = new StreamReader(
        new MemoryStream(Globals.LpcInternalCodec.GetBytes(rawText)),
        Globals.LpcFileCodec, false))
    {
        ParseMap map;
        SyntaxRules lpcRules = new SyntaxRules(out map);
        Scanner.Scanner scanner = new Scanner.Scanner(stream);
        Lexer.Lexer lexer = new Lexer.Lexer(scanner);
        Parser parser = new Parser(lexer, map);

        // Splice every parsed token in at the requested position.
        this.Tokens.InsertRange(tokenIndex, parser.LPCTokens);
    }
}
/// <summary>
/// Parses <paramref name="rawText"/> as LPC source and returns the full list
/// of tokens produced by the parser.
/// </summary>
/// <param name="rawText">Raw LPC source text to parse.</param>
/// <returns>The parser's token list for the given text.</returns>
public static List<Token> CreateTokenListFromText(string rawText)
{
    // Encode through the project's internal codec so the scanner sees
    // file-identical bytes. 'using' fixes the original's undisposed reader;
    // parser.LPCTokens is materialized before the reader is disposed.
    using (StreamReader stream = new StreamReader(
        new MemoryStream(Globals.LpcInternalCodec.GetBytes(rawText)),
        Globals.LpcFileCodec, false))
    {
        ParseMap map;
        SyntaxRules lpcRules = new SyntaxRules(out map);
        Scanner.Scanner scanner = new Scanner.Scanner(stream);
        Lexer.Lexer lexer = new Lexer.Lexer(scanner);
        Parser parser = new Parser(lexer, map);
        return parser.LPCTokens;
    }
}