public void OrTokenTest()
{
    // "or" must lex to a single Or token whose text is preserved.
    var lexer = new LexicalAnalyzer("or");

    TokenType kind = lexer.MoveNext();

    Assert.AreEqual(TokenType.Or, kind);
    Assert.AreEqual("or", lexer.Current);
}
public void LeftParenthesisTest()
{
    // "(" must lex to a LeftParenthesis token with its text preserved.
    var lexer = new LexicalAnalyzer("(");

    TokenType kind = lexer.MoveNext();

    Assert.AreEqual(TokenType.LeftParenthesis, kind);
    Assert.AreEqual("(", lexer.Current);
}
public void NotTokenTest()
{
    // "not" must lex to a single Not token whose text is preserved.
    var lexer = new LexicalAnalyzer("not");

    TokenType kind = lexer.MoveNext();

    Assert.AreEqual(TokenType.Not, kind);
    Assert.AreEqual("not", lexer.Current);
}
public void AndTokenTest()
{
    // "and" must lex to a single And token whose text is preserved.
    var lexer = new LexicalAnalyzer("and");

    TokenType kind = lexer.MoveNext();

    Assert.AreEqual(TokenType.And, kind);
    Assert.AreEqual("and", lexer.Current);
}
public void IdentityTokenTest()
{
    // For "i:user1" the token is only the "i:" prefix; the name that
    // follows is lexed separately.
    var lexer = new LexicalAnalyzer("i:user1");

    TokenType kind = lexer.MoveNext();

    Assert.AreEqual(TokenType.Identity, kind);
    Assert.AreEqual("i:", lexer.Current);
}
public void RoleTokenTest()
{
    // For "r:role1" the token is only the "r:" prefix; the name that
    // follows is lexed separately.
    var lexer = new LexicalAnalyzer("r:role1");

    TokenType kind = lexer.MoveNext();

    Assert.AreEqual(TokenType.Role, kind);
    Assert.AreEqual("r:", lexer.Current);
}
public AnalyzerLEDEER(string xsentences)
{
    // Keep the raw sentence text for later passes.
    sentences = xsentences;

    // Build the lexical analyzer, then hand its symbol table to the
    // syntax analyzer.
    lexan = new LexicalAnalyzer();
    synan = new SyntaxAnalyzer(lexan.createTableSymbol(sentences));
}
/// <summary>
/// Parses the specified expression into a <see cref="BooleanExpression"/>.
/// </summary>
/// <param name="expression">An expression.</param>
/// <returns>A <see cref="BooleanExpression"/> object that is the root of
/// the parse tree.</returns>
public BooleanExpression Parse(string expression)
{
    this.lexer = new LexicalAnalyzer(expression);
    this.MoveNext();

    // The grammar's top level is an Or-expression; afterwards the whole
    // input must have been consumed.
    BooleanExpression root = this.ParseOrOperator();
    this.AssertTokenType(TokenType.EndOfFile);
    return root;
}
public void QuotedStringTest()
{
    // The token text must include the surrounding double quotes.
    const string tokenString = "\"Quoted string\"";
    var lexer = new LexicalAnalyzer(tokenString);

    TokenType kind = lexer.MoveNext();

    Assert.AreEqual(TokenType.QuotedString, kind);
    Assert.AreEqual(tokenString, lexer.Current);
}
public void TestIsLiteral()
{
    // Literals: quoted strings, decimal and integer numbers, booleans.
    // Assert.IsTrue is the idiomatic form of Assert.AreEqual(true, ...)
    // and produces clearer failure messages.
    Assert.IsTrue(LexicalAnalyzer.isLiteral("\"hoi dit is een string\""));
    Assert.IsTrue(LexicalAnalyzer.isLiteral("5.1234"));
    Assert.IsTrue(LexicalAnalyzer.isLiteral("3"));
    Assert.IsTrue(LexicalAnalyzer.isLiteral("true"));
    Assert.IsTrue(LexicalAnalyzer.isLiteral("false"));
}
// Opens the file, reads up to 200 tokens, and prints how many were found.
private static void Case1(string fileName)
{
    // 'using' guarantees the stream is closed even if GetTokens throws;
    // the original leaked the handle on any exception path.
    using (var input = File.Open(fileName, FileMode.Open))
    {
        var lexicalAnalyzer = new LexicalAnalyzer(input);
        var tokens = lexicalAnalyzer.GetTokens(200);
        Console.WriteLine(tokens.Length);
    }
}
// Advances to the next occurrence of our keyword and parses the element
// there; returns null when the keyword does not occur again.
public Tree<CellBase> ParseOne(LexicalAnalyzer theAnalyzer)
{
    theAnalyzer.GoToNextToken(myKeyword);
    return theAnalyzer.Token.Length == 0 ? null : ParseElement(theAnalyzer);
}
public void TestSimpleGrouping()
{
    // Expected grouping of the conversion pattern into its three tokens.
    var expected = new List<string> { @"[L]", @"\", @"[T]" };

    List<string> actual = LexicalAnalyzer.GroupConversionTokens(@"([L])\([T])");

    // CollectionAssert.AreEqual takes (expected, actual); the original
    // passed them reversed, which yields misleading failure messages.
    CollectionAssert.AreEqual(expected, actual);
}
static void Main(string[] args)
{
    // Wire the transpiler to a lexical analyzer, run it over Program.vm
    // in the working directory, then wait for a key press before exiting.
    ILexicalAnalyzer analyzer = new LexicalAnalyzer();
    ITranspiler transpiler = new Transpiler(analyzer);

    string source = Directory.GetCurrentDirectory() + @"\Program.vm";
    transpiler.Transpile(source);

    Console.ReadLine();
}
public void EofTokenTest()
{
    // On empty input the analyzer must keep reporting EndOfFile even
    // after an earlier MoveNext call.
    var lexer = new LexicalAnalyzer(string.Empty);
    lexer.MoveNext();

    Assert.AreEqual(TokenType.EndOfFile, lexer.MoveNext());
}
public ByteCodeWriter(LexicalAnalyzer lexer)
{
    // Bytecode size must not change (won't be able to read saved compiled data)
    Debug.Assert(sizeof(OpCode) == sizeof(Int32));

    ByteCodeEntries = new List<ByteCodeEntry>();
    Counters = new List<int> { 0 };   // start with a single zeroed counter
    Lexer = lexer ?? throw new ArgumentNullException(nameof(lexer));
}
// Aggregates the four compiler phases (lexing, syntax, semantics, code
// generation) behind a single facade; each dependency is stored as-is.
// NOTE(review): "syntaxAnalyer" and "_semanticsAnalyer" are misspellings
// of "Analyzer", but parameter names are caller-visible (named arguments)
// and the fields may be referenced elsewhere in this class, so they are
// deliberately left unchanged here — rename project-wide if desired.
public Compiler(LexicalAnalyzer lexicalAnalyzer, SyntaxAnalyzer syntaxAnalyer, SemanticsAnalyzer semanticsAnalyzer, ObjectCodeGenerator objCodeGenerator)
{
    this._lexicalAnalyzer = lexicalAnalyzer;
    this._syntaxAnalyer = syntaxAnalyer;
    this._semanticsAnalyer = semanticsAnalyzer;
    this._objCodeGenerator = objCodeGenerator;
}
/// <summary>
/// Lexical split: e.g. "[1] あ [2] いうえ " is divided into
/// "[1]", " ", "あ", " ", "[2]", " ", "いうえ", " ".
/// </summary>
/// <param name="st">Character stream to split.</param>
/// <returns>The list of lexemes, or null when the input is unanalyzable
/// (a '[' inside brackets, or a ']' outside them).</returns>
private static List <string> lex(StringStream st)
{
    var strs = new List <string>();
    var state = 0;        // 0 = outside brackets, 1 = inside "[...]"
    var startIndex = 0;   // start of the lexeme currently being scanned

    // Emits the substring [startIndex, CurrentPosition) as one lexeme;
    // does nothing when that span is empty.
    VoidMethod reduce = delegate
    {
        if (st.CurrentPosition == startIndex)
        {
            return;
        }
        var length = st.CurrentPosition - startIndex;
        strs.Add(st.Substring(startIndex, length));
        startIndex = st.CurrentPosition;
    };

    while (!st.EOS)
    {
        if (st.Current == '[')
        {
            if (state == 1) // already inside "[...]" — nesting not allowed
            {
                goto unanalyzable;
            }
            reduce();
            state = 1;
            st.ShiftNext();
        }
        else if (st.Current == ']')
        {
            if (state != 1) // ']' outside "[...]" — unbalanced
            {
                goto unanalyzable;
            }
            st.ShiftNext();
            reduce();   // emit "[...]" including both brackets
            state = 0;
        }
        else if (state == 0 && LexicalAnalyzer.IsWhiteSpace(st.Current))
        {
            // Whitespace outside brackets separates lexemes; a run of
            // spaces becomes a single lexeme of its own.
            reduce();
            LexicalAnalyzer.SkipAllSpace(st);
            reduce();
        }
        else
        {
            st.ShiftNext();
        }
    }
    reduce();   // flush the final lexeme, if any
    return(strs);

unanalyzable:
    return(null);
}
// Renders a plain-text paragraph to HTML: lex into tokens, parse the
// tokens into tags, then insert those tags back into the paragraph.
public string Render(string paragraph)
{
    var analyzer = new LexicalAnalyzer();
    var tokens = analyzer.Analyze(paragraph);

    var parser = new TokenParser(tokens, paragraph);
    var tags = parser.Parse();

    var inserter = new TagInserter();
    var htmlText = inserter.Insert(paragraph, tags);
    return htmlText;
}
// Opens the file and prints tokens one by one until the analyzer reports
// the end of input.
private static void Case2(string fileName)
{
    // 'using' guarantees the handle is released even if token reading
    // throws; the original leaked it on any exception path.
    using (var input = File.Open(fileName, FileMode.Open))
    {
        var lexicalAnalyzer = new LexicalAnalyzer(input);
        while (!lexicalAnalyzer.IsEnded)
        {
            Console.WriteLine(lexicalAnalyzer.GetNextToken());
        }
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="SyntacticAnalyzer"/> class with the specified <see cref="LexicalAnalyzer"/>.
/// </summary>
/// <param name="lexical">The lexical analyzer.</param>
/// <exception cref="ArgumentNullException"><paramref name="lexical"/> is null.</exception>
public SyntacticAnalyzer(LexicalAnalyzer lexical)
{
    // Throw-expression keeps the null guard on the assignment itself;
    // same exception, same behavior as the original if/throw.
    _lexical = lexical ?? throw new ArgumentNullException(nameof(lexical));
    _symbolTable = new VectorSymbolTable();
    _codeGenerator = new CodeGenerator();
}
// Validates the XML in txtXml: lexes it into tokens, shows them in the
// grid, then runs a PDA-based syntax check and reports the verdict.
//
// Approach (condensed from the original notes): the lexer reads characters
// and produces tokens ("<", element names, attribute names, "=", quotes,
// etc.), consulting a dictionary of token/terminator pairs (e.g. "<" pairs
// with "/>" for self-closing tags, a quote pairs with its closing quote).
// The parser pushes tokens on a stack and pops when the matching
// terminator arrives; the XML is accepted iff the stack empties exactly
// when the tokens run out.
private void Button_Click(object sender, RoutedEventArgs e)
{
    try
    {
        Stopwatch s1 = new Stopwatch();
        s1.Start();

        var textToTest = txtXml.Text;

        // Reject inputs larger than 100 KB; the generic exception is
        // caught below and reported as invalid.
        var size = System.Text.ASCIIEncoding.ASCII.GetByteCount(textToTest);
        if (size > 1024 * 100)
        {
            throw new Exception();
        }

        // Tokenize and display, then validate with the pushdown automaton.
        LexicalAnalyzer lexicalAnalyzer = new LexicalAnalyzer();
        Tokens = lexicalAnalyzer.FindTokensInText(textToTest.Trim());
        this.DataGrid1.ItemsSource = Tokens;

        SyntaxAnalyzerPDA syntaxAnalyzer = new SyntaxAnalyzerPDA();
        var valid = syntaxAnalyzer.Validate(Tokens);
        if (valid)
        {
            s1.Stop();
            MessageBox.Show("Yes, valid - " + s1.ElapsedMilliseconds.ToString() + "ms");
        }
        else
        {
            MessageBox.Show("No, invalid");
        }
    }
    catch (Exception)
    {
        // BUG FIX: the original body also contained a stray identifier
        // ('a') inside the trailing comment block, which does not compile;
        // it has been removed. The unused local holding the exception was
        // dropped as well.
        MessageBox.Show("Error, Invalid");
    }
}
// Runs the lexer over the editor text, recolors it, restores the caret,
// then displays the resulting tokens and errors.
private void LexicalAnalyzerItem_Click(object sender, EventArgs e)
{
    var analyzer = new LexicalAnalyzer(this.CodeTextBox.Text);
    analyzer.Analyses(out tokenList, out errorList);

    int caret = CodeTextBox.SelectionStart;
    ColorWord();
    // Mirrors the original call: the caret index is passed as both the
    // selection start and the selection length.
    CodeTextBox.Select(caret, caret);

    PrintResult(tokenList, errorList);
}
// Validates variable usage across the sheet: every simple-transition
// condition must parse cleanly against the sheet's condition variables,
// and every output named by a transition or by a state's enter/exit list
// must exist in InternalOutputs. Each violation appends an Error to
// MessagesList.
private void CheckVariablesUsage()
{
    LexicalAnalyzer lexAnalyzer = new LexicalAnalyzer();
    SyntaxAnalyzer syntaxAnalyzer;

    // 1) Transition conditions and transition output operations.
    foreach (Transition trans in TransitionsList)
    {
        if (trans is SimpleTransition strans)
        {
            // Parse the condition against the sheet's known condition names.
            lexAnalyzer.Source = strans.Condition;
            syntaxAnalyzer = new SyntaxAnalyzer(lexAnalyzer, VariableCollection.GetConditionDictionary(trans.OwnerDraw.OwnerSheet).Keys.ToList());
            foreach (SyntaxToken token in syntaxAnalyzer.Tokens)
            {
                if (token.Qualifier != SyntaxToken.Qualifiers.Correct)
                {
                    MessagesList.Add(new CheckMessage(CheckMessage.MessageTypes.Error, string.Format("{0}: {1}.", trans.Name, token.ToString()), trans));
                }
            }

            // Every output referenced by the transition must be declared.
            foreach (string outputOperation in strans.OutputsList)
            {
                string outputName = LexicalRules.GetOutputId(outputOperation);
                if (!trans.OwnerDraw.OwnerSheet.Variables.InternalOutputs.Exists(output => output.Name == outputName))
                {
                    MessagesList.Add(new CheckMessage(CheckMessage.MessageTypes.Error, "Output " + outputName + " is not defined in variables list.", trans));
                }
            }
        }
    }

    // 2) State enter/exit output operations.
    foreach (DrawableObject obj in ObjectsTable)
    {
        if (obj is State state)
        {
            //state.EnterOutputsList.RemoveAll(str => !state.OwnerDraw.OwnerSheet.Variables.InternalOutputs.Exists(var => var.Name == LexicalAnalyzer.GetId(str)));
            foreach (string outputOperation in state.EnterOutputsList)
            {
                string outputName = LexicalRules.GetOutputId(outputOperation);
                // NOTE(review): here the name is additionally passed through
                // LexicalAnalyzer.GetId before comparison, unlike the
                // transition loop above which compares outputName directly —
                // confirm whether this asymmetry is intentional.
                if (!state.OwnerDraw.OwnerSheet.Variables.InternalOutputs.Exists(var => var.Name == LexicalAnalyzer.GetId(outputName)))
                {
                    MessagesList.Add(new CheckMessage(CheckMessage.MessageTypes.Error, "Output " + outputName + " is not defined in variables list.", state));
                }
            }

            //state.ExitOutputsList.RemoveAll(str => !state.OwnerDraw.OwnerSheet.Variables.InternalOutputs.Exists(var => var.Name == LexicalAnalyzer.GetId(str)));
            foreach (string outputOperation in state.ExitOutputsList)
            {
                string outputName = LexicalRules.GetOutputId(outputOperation);
                if (!state.OwnerDraw.OwnerSheet.Variables.InternalOutputs.Exists(var => var.Name == LexicalAnalyzer.GetId(outputName)))
                {
                    MessagesList.Add(new CheckMessage(CheckMessage.MessageTypes.Error, "Output " + outputName + " is not defined in variables list.", state));
                }
            }
        }
    }
}
public void LoadRulesLA_test2()
{
    LexicalAnalyzer la = new LexicalAnalyzer();
    if (!la.LoadRules(path))
    {
        Assert.Fail();
    }

    // For "=2.5;\n" the second token (index 1) should be the number 2.5.
    la.Analyze("=2.5;\n");

    // Assert.AreEqual takes (expected, actual); the original passed the
    // actual value first, producing misleading failure messages.
    Assert.AreEqual("Num", la.data[1].property);
    Assert.AreEqual("2.5", la.data[1].value);
}
static void Main(string[] args)
{
    KeywordHashing keywordHashing = new KeywordHashing(Keyword.keywords);
    FileHandler fileHandler = new FileHandler();

    // Dispose the reader when analysis finishes; the original never
    // closed it. NOTE(review): assumes FileHandler does not retain and
    // reuse the reader after Main returns — confirm.
    using (StreamReader reader = fileHandler.ReadData())
    {
        LexicalAnalyzer lexicalAnalyzer = new LexicalAnalyzer(reader, keywordHashing);
        lexicalAnalyzer.PerformAnalysis();
        lexicalAnalyzer.print();
    }
}
// Runs the full pipeline (lexer -> parser -> interpreter) over the active
// tab's text, then displays the first generated NFA image and transition
// table plus the interpreter's console output.
private void ThompsonToolStripMenuItem_Click(object sender, EventArgs e)
{
    // Grab the editor contents from the currently selected tab.
    RichTextBox richTextBox = tabControl1.SelectedTab.Controls.Cast <RichTextBox>().FirstOrDefault(x => x is RichTextBox);
    string content = richTextBox.Text;

    // Reset the NFA image viewer (dispose the previous bitmap, if any).
    indexImage = 0;
    images = new List <string>();
    if (automataImage.Image != null)
    {
        automataImage.Image.Dispose();
    }
    automataImage.Image = null;

    // Reset the transition-table viewer the same way.
    indexTable = 0;
    tables = new List <string>();
    if (tableBox.Image != null)
    {
        tableBox.Image.Dispose();
    }
    tableBox.Image = null;

    // Stage 1: lexical scan. Proceed only when no lexical errors.
    lexicalAnalyzer = new LexicalAnalyzer();
    lexicalAnalyzer.Scanner(content);
    if (lexicalAnalyzer.ListError.Count() == 0)
    {
        // Stage 2: syntactic analysis over the token list.
        syntacticAnalyzer = new SyntacticAnalyzer(lexicalAnalyzer.ListToken);
        if (syntacticAnalyzer.ListError.Count() == 0)
        {
            // Stage 3: interpretation; collect generated NFA images and
            // tables and show the first of each, if any exist.
            interpreter = new Interpreter(lexicalAnalyzer.ListToken);
            images.AddRange(interpreter.RoutesNFA);
            if (images.Count > 0)
            {
                LoadImage(0);
            }
            tables.AddRange(interpreter.RoutesTables);
            if (tables.Count > 0)
            {
                LoadTable(0);
            }
            commandLineTextBox.Text = interpreter.ConsoleMessage.ToString();
        }
        else
        {
            syntacticAnalyzer.GenerateReports();
        }
    }
    // NOTE(review): lexical errors fall through silently (no report is
    // generated), unlike syntactic errors — confirm this is intentional.
}
// Syntax highlighting: re-scans the active tab's text and recolors tokens
// by category in five sequential passes (reserved words, numbers, braces,
// semicolons, strings). Pass order matters: wordColor is called per token
// value, so later passes repaint any overlapping matches.
// NOTE(review): method name and scanner() casing differ from the
// PascalCase used elsewhere in this file; callers are not visible here,
// so the names are left unchanged.
private void intelliSense()
{
    LexicalAnalyzer lexicalAnalyzer = new LexicalAnalyzer();
    RichTextBox richTextBox = tabControl1.SelectedTab.Controls.Cast <RichTextBox>().FirstOrDefault(x => x is RichTextBox);
    string content = richTextBox.Text;
    lexicalAnalyzer.scanner(content);

    // Pass 1: reserved words -> dark blue.
    foreach (Token item in lexicalAnalyzer.ListToken)
    {
        if (item.TypeToken.Equals("Reservada Grafica") || item.TypeToken.Equals("Reservada Nombre") || item.TypeToken.Equals("Reservada Continente") || item.TypeToken.Equals("Reservada Pais") || item.TypeToken.Equals("Reservada Poblacion") || item.TypeToken.Equals("Reservada Saturacion") || item.TypeToken.Equals("Reservada Bandera"))
        {
            wordColor(item.Value, Color.FromArgb(41, 83, 131), richTextBox);
        }
    }

    // Pass 2: numeric literals -> teal.
    foreach (Token item in lexicalAnalyzer.ListToken)
    {
        if (item.TypeToken.Equals("Numero"))
        {
            wordColor(item.Value, Color.FromArgb(30, 232, 190), richTextBox);
        }
    }

    // Pass 3: curly braces -> pink.
    foreach (Token item in lexicalAnalyzer.ListToken)
    {
        if (item.TypeToken.Equals("Simbolo Llave Izquierda") || item.TypeToken.Equals("Simbolo Llave Derecha"))
        {
            wordColor(item.Value, Color.FromArgb(227, 103, 149), richTextBox);
        }
    }

    // Pass 4: semicolons -> orange.
    foreach (Token item in lexicalAnalyzer.ListToken)
    {
        if (item.TypeToken.Equals("Simbolo Punto y Coma"))
        {
            wordColor(item.Value, Color.FromArgb(207, 113, 65), richTextBox);
        }
    }

    // Pass 5: string literals -> gold.
    foreach (Token item in lexicalAnalyzer.ListToken)
    {
        if (item.TypeToken.Equals("Cadena"))
        {
            wordColor(item.Value, Color.FromArgb(217, 171, 103), richTextBox);
        }
    }
}
// Recursive-descent rule for variable declarations:
//   VarDecl -> final Type idt = numt ; VarDecl
//            | Type IdentifierList ; VarDecl
//            | (empty)
private void VarDecl(LexicalAnalyzer la)
{
    (token, lexeme) = la.GetNextToken();
    if (token == "finalt")
    {
        Type(la);
        (token, lexeme) = la.GetNextToken();
        if (token == "idt")
        {
            // BUG FIX: the original re-tested the same token for
            // "assignOpt", "numt" and "semit" without ever advancing the
            // lexer, so those branches could never match; each check now
            // consumes the next token first.
            (token, lexeme) = la.GetNextToken();
            if (token == "assignOpt")
            {
                (token, lexeme) = la.GetNextToken();
                if (token == "numt")
                {
                    (token, lexeme) = la.GetNextToken();
                    if (token == "semit")
                    {
                        VarDecl(la);
                    }
                    else
                    {
                        Console.WriteLine("Expected ; ");
                    }
                }
                else
                {
                    Console.WriteLine("Expected num token ");
                }
            }
            else
            {
                Console.WriteLine("Expected = ");
            }
        }
        else
        {
            Console.WriteLine("Expected id token ");
        }
    }
    else
    {
        Type(la);
        IdentifierList(la);
        (token, lexeme) = la.GetNextToken();
        if (token == "semit")
        {
            VarDecl(la);
        }
        else
        {
            Console.WriteLine("Expected ; ");
        }
    }
}
// Resets all parser state, runs a parse using the given lexer, and
// returns this instance so calls can be chained fluently.
public BottomUpParser Parse(LexicalAnalyzer lexicalAnalyzer)
{
    Current = null;
    TopLevelAST = null;
    Stack.Clear();
    ParsingNodes.Clear();

    LexicalAnalyzer = lexicalAnalyzer;
    Parse();

    return this;
}
public void TestCodeGenerated()
{
    // Compile "1+2+3" through the full pipeline and verify a non-empty
    // output file exists at 'path'.
    const string expression = "1+2+3";

    var lexer = new LexicalAnalyzer(expression, 0);
    var parser = new SyntaxAnalyzer(lexer);
    var node = parser.Expression(0);

    var fileWriter = new FileWriter("");
    var semanticAnalyzer = new SemanticAnalyzer(parser);
    var codeGenerator = new CodeGenerator(semanticAnalyzer, fileWriter);
    codeGenerator.GenerateCode(node);

    Assert.True(File.Exists(path) && new FileInfo(path).Length != 0);
}
// Parses a sequence of sibling elements. Recurses after each element;
// when the recursion returns nothing, this element was the last one and
// any trailing text is attached to it.
public List<Tree<CellBase>> Parse(LexicalAnalyzer theAnalyzer)
{
    var result = new List<Tree<CellBase>>();

    Tree<CellBase> head = ParseOne(theAnalyzer);
    if (head != null)
    {
        result.Add(head);

        List<Tree<CellBase>> tail = Parse(theAnalyzer);
        result.AddRange(tail);

        if (tail.Count == 0)
        {
            var trailer = theAnalyzer.Trailer;
            if (trailer.Length > 0)
            {
                head.Value.SetAttribute(CellAttribute.Trailer, trailer);
            }
        }
    }
    return result;
}
// Parses one element starting at the current token: recursively parses
// children up to the matching "/keyword" end tag, then builds a tree node
// carrying the element's body text, start/end tags, and leading text.
Tree<CellBase> ParseElement(LexicalAnalyzer theAnalyzer)
{
    string tag = theAnalyzer.Token;      // the start tag we are positioned on
    string leader = theAnalyzer.Leader;  // text that preceded the start tag

    // Child parsing must stop at this element's end tag.
    theAnalyzer.PushEnd("/" + myKeyword);
    List<Tree<CellBase>> children = myChildParser.Parse(theAnalyzer);
    if (IRequireChildren && children.Count == 0)
    {
        throw new ApplicationException(string.Format("Can't find tag: {0}", myChildParser.Keyword));
    }
    theAnalyzer.PopEnd();

    // Consume our own end tag; an empty token means it was missing.
    theAnalyzer.GoToNextToken("/" + myKeyword);
    if (theAnalyzer.Token.Length == 0) throw new ApplicationException("expected /" + myKeyword + " tag");

    // After GoToNextToken, Leader holds the text between the last child
    // and the end tag — that is this element's body.
    var result = new TreeList<CellBase>(new CellBase(HtmlToText(theAnalyzer.Leader)));
    result.Value.SetAttribute(CellAttribute.Body, theAnalyzer.Leader);
    result.Value.SetAttribute(CellAttribute.EndTag, theAnalyzer.Token);
    if (leader.Length > 0) result.Value.SetAttribute(CellAttribute.Leader, leader);
    result.Value.SetAttribute(CellAttribute.StartTag, tag);
    foreach (Tree<CellBase> child in children) result.AddBranch(child);
    return result;
}
// Tokenizes the source file named by args[0], queues the tokens, then
// parses the queue; syntax errors are reported via SyntaxException.
static void Main(string[] args)
{
    TokenQueue = new Queue<Token>();
    LexicalAnalyzer scanner = new LexicalAnalyzer();
    scanner.OpenFile(args[0]);
    Token token = new Token();
    string output;
    try
    {
        bool done = false;
        while (!done)
        {
            token = scanner.GetNextToken();
            if (token.tag != null)
            {
                // Formatted line retained from the original; it appears
                // intended for debug/trace output.
                output = string.Format("{0,-20} {1,-5} {2,-5} {3}", token.tag, scanner.line, (scanner.column - token.lexeme.Length - 1), token.lexeme);
                TokenQueue.Enqueue(token);
            }
            // BUG FIX: the original called token.tag.Equals(...), which
            // throws NullReferenceException whenever tag is null; comparing
            // from the constant side is null-safe and otherwise identical.
            if (Tags.MP_EOF.Equals(token.tag))
            {
                done = true;
            }
        }
        Parser parser = new Parser(TokenQueue, scanner, args[0]);
        parser.SystemGoal();
        Console.WriteLine("Program Parsed Correctly");
    }
    catch (SyntaxException e)
    {
        Console.WriteLine(e.ErrorMessage);
    }
    Console.WriteLine("Press Any Key To Exit");
    Console.Read();
}
// Advances to the next occurrence of our keyword and parses the element
// found there; yields null when the keyword does not occur again.
public Tree<CellBase> ParseOne(LexicalAnalyzer theAnalyzer)
{
    theAnalyzer.GoToNextToken(myKeyword);
    return theAnalyzer.Token.Length == 0 ? null : ParseElement(theAnalyzer);
}
// Recolors the whole document: clears underlining, then walks the token
// stream and colors each token's span according to its type. Words and
// quoted strings immediately following an identity/role prefix inherit
// that prefix's color.
private void HighlightTokens()
{
    this.SelectionStart = 0;
    this.SelectionLength = this.Text.Length;
    this.SelectionUnderlineColor = UnderlineColor.None;
    this.SelectionUnderlineStyle = UnderlineStyle.None;

    var lexer = new LexicalAnalyzer(this.Text);
    TokenType previous = TokenType.Any;

    TokenType current = lexer.MoveNext();
    while (current != TokenType.EndOfFile)
    {
        this.SelectionStart = lexer.CurrentMatch.Index;
        this.SelectionLength = lexer.CurrentMatch.Length;

        Color color;
        switch (current)
        {
            case TokenType.And:
            case TokenType.Or:
            case TokenType.Not:
                color = Color.Blue;
                break;

            case TokenType.Identity:
            case TokenType.Role:
                color = Color.Navy;
                break;

            case TokenType.Word:
            case TokenType.QuotedString:
                color = (previous == TokenType.Identity || previous == TokenType.Role)
                    ? Color.Navy
                    : Color.Black;
                break;

            default:
                color = Color.Black;
                break;
        }
        this.SelectionColor = color;

        previous = current;
        current = lexer.MoveNext();
    }
}
public void ComplexExpressionTest()
{
    // Note: the input is deliberately unbalanced — the lexer only
    // tokenizes, it does not check parenthesis matching.
    string input = "((Role1 And \"Super User\" And Role3) Or Not(Role2)";
    LexicalAnalyzer lexer = new LexicalAnalyzer(input);

    ArrayList list = new ArrayList();
    for (TokenType tokenType = lexer.MoveNext(); tokenType != TokenType.EndOfFile; tokenType = lexer.MoveNext())
    {
        list.Add(new Item(tokenType, lexer.Current));
    }

    // Table-driven assertions replace the original's thirteen copy-pasted
    // blocks. The count is checked first so a short token stream fails
    // with a clear message instead of an index error.
    var expected = new[]
    {
        Tuple.Create(TokenType.LeftParenthesis, "("),
        Tuple.Create(TokenType.LeftParenthesis, "("),
        Tuple.Create(TokenType.Word, "Role1"),
        Tuple.Create(TokenType.And, "And"),
        Tuple.Create(TokenType.QuotedString, "\"Super User\""),
        Tuple.Create(TokenType.And, "And"),
        Tuple.Create(TokenType.Word, "Role3"),
        Tuple.Create(TokenType.RightParenthesis, ")"),
        Tuple.Create(TokenType.Or, "Or"),
        Tuple.Create(TokenType.Not, "Not"),
        Tuple.Create(TokenType.LeftParenthesis, "("),
        Tuple.Create(TokenType.Word, "Role2"),
        Tuple.Create(TokenType.RightParenthesis, ")"),
    };

    Assert.AreEqual(expected.Length, list.Count);
    for (int i = 0; i < expected.Length; i++)
    {
        Item item = (Item)list[i];
        Assert.AreEqual(expected[i].Item1, item.TokenType);
        Assert.AreEqual(expected[i].Item2, item.Token);
    }
}
// Repeatedly finds the child parser whose keyword occurs earliest in the
// remaining input, parses that element, and recurses until no child
// keyword is found.
public List<Tree<CellBase>> Parse(LexicalAnalyzer theAnalyzer)
{
    var trees = new List<Tree<CellBase>>();

    // Select the child parser with the earliest keyword occurrence.
    ListParser winner = null;
    int winnerPosition = int.MaxValue;
    foreach (ListParser candidate in myChildParsers)
    {
        int position = theAnalyzer.FindPosition(candidate.Keyword);
        if (position >= 0 && position < winnerPosition)
        {
            winnerPosition = position;
            winner = candidate;
        }
    }

    if (winner != null)
    {
        trees.Add(winner.ParseOne(theAnalyzer));
        trees.AddRange(Parse(theAnalyzer));
    }
    return trees;
}
public void WhitespaceTest()
{
    // Input consisting solely of whitespace yields EndOfFile immediately.
    var lexer = new LexicalAnalyzer(" \n\r");

    Assert.AreEqual(TokenType.EndOfFile, lexer.MoveNext());
}
// Stores the lexer dependency for later compilation phases.
// NOTE(review): the type/constructor name "Сompiler" begins with the
// CYRILLIC letter Es (U+0421), not the Latin 'C'. It is left unchanged
// here because renaming the constructor would break every existing
// reference, but it should be normalized project-wide. Likewise
// "lexicalAnalizer"/"_lexicalAnalizer" misspell "Analyzer"; the field may
// be used elsewhere in the class, so it is not renamed here.
public Сompiler(LexicalAnalyzer lexicalAnalizer)
{
    _lexicalAnalizer = lexicalAnalizer;
}