/// <summary>
/// Loads and parses the S-expression file <paramref name="Filename"/> into
/// this object's <c>Name</c> and <c>Items</c>.
/// </summary>
/// <param name="Filename">Path of the file to parse.</param>
/// <returns>
/// <c>true</c> when the file parsed to a top-level SExpression and the
/// fields were populated; <c>false</c> otherwise.
/// </returns>
public bool LoadFromFile(string Filename)
{
    bool result = false;

    // NOTE(review): the original read the file into an unused local here.
    // The read is kept only as an early existence/access check (it throws
    // before the lexer starts if the file is missing) — confirm whether the
    // double read is intentional, since lexer.Initialise opens the file too.
    File.ReadAllLines(Filename);

    CLex lexer = new CLex();
    lexer.IgnoreWhitespace = true;
    lexer.IgnoreEol = true;
    lexer.Initialise(Filename);

    SNodeBase node = ParseNode(lexer);

    if (node is SExpression)
    {
        SExpression sexpr = node as SExpression;
        this.Name = sexpr.Name;
        this.Items = sexpr.Items;

        // Bug fix: result was never assigned, so the method always
        // returned false even after a successful parse.
        result = true;
    }

    return (result);
}
/// <summary>
/// Runs the lexer over the text in <c>tbFSource</c> and distributes the
/// recognised lexical units into the three output boxes: identifiers
/// (<c>symbols</c>), numbers (<c>numbers</c>) and symbolic/reserved words
/// (<c>reserved</c>). On a lexer error, selects the offending region of the
/// source text box instead.
/// </summary>
private void btnFStart_Click(object sender, EventArgs e)
{
    TablesHash th = new TablesHash();
    CLex Lex = new CLex();
    Lex.strPSource = tbFSource.Lines;
    Lex.enumPState = TState.Start;

    symbols.Text = "";
    numbers.Text = "";
    reserved.Text = "";

    try
    {
        // Tokenise the whole source; the hash table buckets each word.
        while (Lex.enumPState != TState.Finish)
        {
            Lex.NextToken();
            String word = Lex.strPLexicalUnit;
            th.add_word(word, Lex.enumPToken);
        }

        // Fix: the original appended to TextBox.Text inside the loops,
        // which is O(n^2) (full string copy each iteration) and repaints
        // the control on every append. Build each string once instead.
        // StringBuilder.AppendLine uses Environment.NewLine, matching the
        // original separator exactly.
        var sbLetters = new System.Text.StringBuilder();
        for (int i = 0; i < th.count_letter_words; i++)
        {
            sbLetters.AppendLine(th.letter_words[i]);
        }
        symbols.Text = sbLetters.ToString();

        var sbDigits = new System.Text.StringBuilder();
        for (int i = 0; i < th.count_digit_words; i++)
        {
            sbDigits.AppendLine(th.digit_words[i]);
        }
        numbers.Text = sbDigits.ToString();

        var sbSymbolic = new System.Text.StringBuilder();
        for (int i = 0; i < th.count_symbolic_words; i++)
        {
            sbSymbolic.AppendLine(th.symbolic_words[i]);
        }
        reserved.Text = sbSymbolic.ToString();
    }
    catch (Exception)   // exception variable was unused; error position comes from the lexer
    {
        // Highlight the source from the start up to the position the lexer
        // reported when it failed.
        tbFSource.Select();
        tbFSource.SelectionStart = 0;

        int n = 0;
        for (int i = 0; i < Lex.intPSourceRowSelection; i++)
        {
            // NOTE(review): "+ 2" assumes CRLF line breaks in the textbox
            // content — confirm this matches how tbFSource.Lines splits.
            n += tbFSource.Lines[i].Length + 2;
        }
        n += Lex.intPSourceColSelection;

        tbFSource.SelectionLength = n;
    }
}
/// <summary>
/// Parses the elements of a parenthesised list. On entry the current token
/// is the opening "("; on exit the matching ")" (or EOF) has been consumed.
/// </summary>
/// <param name="lexer">Token source positioned on "(".</param>
/// <returns>An <c>SNodeList</c> holding the parsed child nodes.</returns>
private SNodeBase ParseList(CLex lexer)
{
    SNodeList result = new SNodeList();
    result.Items = new List<SNodeBase>();

    // Step past the opening "(".
    lexer.GetToken();

    for (;;)
    {
        if (lexer.CurToken.Type == TokenType.EOF)
        {
            break;
        }

        bool isCloser = (lexer.CurToken.Type == TokenType.Symbol)
                     && (lexer.CurToken.Value == ")");
        if (isCloser)
        {
            break;
        }

        result.Items.Add(ParseNode(lexer));
    }

    // Consume the ")" terminator.
    lexer.GetToken();

    return (result);
}
/// <summary>
/// Parses one node from the token stream: a parenthesised list — promoted to
/// an <c>SExpression</c> when non-empty — or a single atom token.
/// </summary>
/// <param name="lexer">Token source positioned on the node's first token.</param>
/// <returns>
/// An <c>SExpression</c> for a non-empty list, the raw <c>SNodeList</c> for
/// an empty list, or an <c>SNodeAtom</c> for any other token.
/// </returns>
private SNodeBase ParseNode(CLex lexer)
{
    if ((lexer.CurToken.Type == TokenType.Symbol) && (lexer.CurToken.Value == "("))
    {
        SNodeBase node = ParseList(lexer);

        if (node is SNodeList)
        {
            SNodeList list = node as SNodeList;

            if (list.Items.Count > 0)
            {
                // Promote "(name arg1 arg2 ...)": the first atom becomes the
                // expression name, the remaining items its arguments.
                SExpression sexpr = new SExpression();

                if (list.Items[0] is SNodeAtom)
                {
                    SNodeAtom value = list.Items[0] as SNodeAtom;
                    sexpr.Name = value.Value;
                }

                sexpr.Items = new List<SNodeBase>();
                for (int index = 1; index < list.Items.Count; index++)
                {
                    sexpr.Items.Add(list.Items[index]);
                }

                return (sexpr);
            }

            // An empty list stays an SNodeList.
            return (node);
        }

        return (node);
    }
    else
    {
        SNodeAtom node;

        if (lexer.CurToken.Type == TokenType.StringLiteral)
        {
            string value = lexer.CurToken.Value;

            // Robustness fix: require at least 2 chars before stripping the
            // quotes — the original crashed on a lone '"' token, where
            // StartsWith and EndsWith both matched the same character and
            // Substring(1, -1) threw.
            if (value.Length >= 2 && value.StartsWith("\"") && value.EndsWith("\""))
            {
                value = value.Substring(1, value.Length - 2);

                // Bug fix: the original only noted 'descape " and \' in a
                // comment but never did it, leaving escape sequences in the
                // atom value.
                value = UnescapeStringLiteral(value);
            }

            node = new SNodeAtom(value);
        }
        else
        {
            node = new SNodeAtom(lexer.CurToken.Value);
        }

        lexer.GetToken();

        return (node);
    }
}

/// <summary>
/// Replaces the escape sequences \" and \\ inside a string-literal body with
/// their literal characters. Any other backslash is kept as-is.
/// </summary>
/// <param name="s">Literal content with the surrounding quotes removed.</param>
/// <returns>The unescaped text.</returns>
private static string UnescapeStringLiteral(string s)
{
    // Fast path: nothing to unescape.
    if (s.IndexOf('\\') < 0)
    {
        return s;
    }

    var sb = new System.Text.StringBuilder(s.Length);
    for (int i = 0; i < s.Length; i++)
    {
        char c = s[i];
        if (c == '\\' && i + 1 < s.Length && (s[i + 1] == '"' || s[i + 1] == '\\'))
        {
            // Skip the backslash and emit the escaped character.
            i++;
            c = s[i];
        }
        sb.Append(c);
    }

    return sb.ToString();
}