public void TestScanCompare()
{
    // Scanning " < <= == != >= >" must produce six Word tokens carrying the
    // comparison tags in source order.
    var lexer = new Lexer.Lexer();
    var reader = new StringReader( " < <= == != >= >" );

    var expectedTags = new[]
    {
        Tag.Less, Tag.LessOrEqual, Tag.Equal,
        Tag.NotEqual, Tag.BetterOrEqual, Tag.Better,
    };

    foreach ( var expected in expectedTags )
    {
        var token = lexer.Scan( reader );
        Assert.AreEqual( typeof( Word ), token.GetType() );
        Assert.AreEqual( expected, token.Tag );
    }
}
static void Main(string[] args)
{
    // Exactly one argument — the input file path — is required.
    if (args.Length != 1)
    {
        Console.WriteLine("Please provide input file");
        Environment.Exit(-1);
    }

    string inputText = File.ReadAllText(args[0]);
    string fileName = Path.GetFileNameWithoutExtension(args[0]);

    Lexer.Lexer lex = new Lexer.Lexer(inputText);

    // Valid tokens and error tokens go to separate files; everything is
    // echoed to the console as it is scanned.
    using (StreamWriter tokenFile = new System.IO.StreamWriter($"{fileName}.outlextokens"))
    using (StreamWriter tokenErrorFile = new System.IO.StreamWriter($"{fileName}.outlexerrors"))
    {
        Token t;
        do
        {
            t = lex.GetNextToken();
            Console.WriteLine(t.ToString());

            var target = lex.IsErrorToken(t.TokenType) ? tokenErrorFile : tokenFile;
            target.WriteLine(t.ToString());
        } while (t.TokenType != TokenType.EOF);
    }
}
static void Main(string[] args)
{
    const string WELCOME = "Welcome to Calculator. Feel free to type any expression you want.";
    const string PROMPT = ">> ";

    Console.Out.Write(WELCOME + Environment.NewLine);

    // REPL: read a line, lex/parse/evaluate it, print the result.
    while (true)
    {
        Console.Out.Write(PROMPT);
        try
        {
            var input = Console.In.ReadLine();

            // Typing "exit" (any casing) leaves the loop.
            if (string.Compare(input, "exit", StringComparison.OrdinalIgnoreCase) == 0)
            {
                break;
            }

            var lexer = new Lexer.Lexer(input);
            var parser = new Parser.Parser(lexer);
            var expression = parser.Parse();
            var result = Evaluate.Evaluator.Eval(expression);

            Console.Out.Write(result.Inspect());
            Console.Out.Write(Environment.NewLine);
        }
        catch (Exception e)
        {
            // Keep the REPL alive on any failure; show a short message only.
            Console.Out.Write("Oops! Something seems to go wrong." + Environment.NewLine);
            Console.Out.Write(e.Message);
            Console.Out.Write(Environment.NewLine);
        }
    }
}
/// <summary>
/// Lexes <paramref name="fileData"/> and parses it into a single expression.
/// </summary>
/// <param name="fileData">Raw source text to parse.</param>
/// <returns>The root of the parsed expression tree.</returns>
public Expression Parse(string fileData)
{
    // Removed a large block of commented-out two-pass "kernel assembly" code
    // (label-table construction, Kernel buffer reset) left over from an
    // earlier design; it is recoverable from version control if ever needed.
    Lexer lexer = new Lexer(fileData);
    return ParseExpression(lexer);
}
/// <summary>
/// Creates a parse context over <paramref name="source"/>: stores the given
/// collaborators and primes the first token so parsing can start immediately.
/// </summary>
/// <param name="parseStore">Store backing this parse session.</param>
/// <param name="lexer">Lexer producing tokens for the context.</param>
/// <param name="source">Raw source text being parsed.</param>
public ParseContext(IParseStore parseStore, Lexer.Lexer lexer, string source) { _parseStore = parseStore; _lexer = lexer; _source = source; PrepareNextToken(); }
/// <summary>
/// Loads the handler's source code via <see cref="HandlerFiles"/> and wires up
/// the lexer/parser pipeline over it.
/// </summary>
public Handler() { var file = new HandlerFiles(); var code = file.GetCode(); Lex = new Lexer.Lexer(new SourceCode(code)); Parser = new Parser(Lex); }
/// <summary>
/// Lexes and builds the AST for a source file. The file can be passed as
/// args[0]; the old hard-coded path is kept only as a fallback so existing
/// launch configurations keep working.
/// </summary>
static void Main(string[] args)
{
    // Prefer an explicit argument over the developer-machine-specific path.
    var path = args.Length > 0
        ? args[0]
        : @"C:\Users\zombi\Documents\Study\CC\cc_eiffel\Lexer\TestSrc\test1.txt";

    string fileAsString = File.ReadAllText(path);

    var lexer = new Lexer.Lexer();
    var tokens = lexer.Parse(fileAsString, false);

    var syntaxer = new Syntaxer();
    syntaxer.BuildAST(tokens);
}
public void TestScanComment()
{
    // A "//" line comment scans into a Comment token whose Value is the text
    // after the slashes; the trailing newline is not part of it.
    var lexer = new Lexer.Lexer();
    var reader = new StringReader( "// Comment\n" );

    var token = lexer.Scan( reader );

    Assert.AreEqual( typeof (Comment), token.GetType() );
    Assert.AreEqual( " Comment", ( (Comment) token ).Value );
}
public void MessageField_NOK02()
{
    // "= <number>" is missing, so the field must fail to parse with one error.
    const string text = " optional string test1 ;";
    var lexer = new Lexer(text);
    lexer.Matches = Helper.SplitText(text);

    Assert.IsFalse(lexer.ParseMessageField());

    Assert.AreEqual(3, lexer.Tokens.Count);
    Assert.AreEqual(1, lexer.Errors.Count);
    Assert.AreEqual(3, lexer.Index);
}
public void MessageField_NOK05()
{
    // A lone "option" keyword is an unfinished statement: no tokens are
    // produced and a single error is reported.
    const string text = " option";
    var lexer = new Lexer(text);
    lexer.Matches = Helper.SplitText(text);

    Assert.IsFalse(lexer.ParseMessageField());

    Assert.AreEqual(0, lexer.Tokens.Count);
    Assert.AreEqual(1, lexer.Errors.Count);
    Assert.AreEqual(1, lexer.Index);
}
// NOTE(review): "Filed" in the name looks like a typo for "Field" — consider
// renaming together with its sibling tests.
public void ParseFiledOption_NOK04()
{
    // "cpp=" with no value is incomplete: expect failure, one token, one error.
    const string text = " cpp=\r\n";
    var lexer = new Lexer(text);
    lexer.Matches = Helper.SplitText(text);

    Assert.IsFalse(lexer.ParseFieldOption());

    Assert.AreEqual(1, lexer.Tokens.Count);
    Assert.AreEqual(1, lexer.Errors.Count);
    Assert.AreEqual(3, lexer.Index);
}
// NOTE(review): despite the "ParseEnum" name this exercises ParseOption(true);
// consider renaming for clarity.
public void ParseEnum_NOK05()
{
    // "option java_package" without a value/terminator must fail.
    const string text = " option java_package\r\n";
    var lexer = new Lexer(text);
    lexer.Matches = Helper.SplitText(text);

    Assert.IsFalse(lexer.ParseOption(true));

    Assert.AreEqual(2, lexer.Tokens.Count);
    Assert.AreEqual(1, lexer.Errors.Count);
    Assert.AreEqual(3, lexer.Index);
}
// NOTE(review): the name says "MessageField" but the method under test is
// ParseMessageExtensions — consider renaming (another MessageField_NOK05
// exists elsewhere; if both live in one fixture the names collide).
public void MessageField_NOK05()
{
    // " extensions " alone is an incomplete statement.
    const string text = " extensions ";
    var lexer = new Lexer(text);
    lexer.Matches = Helper.SplitText(text);

    Assert.IsFalse(lexer.ParseMessageExtensions());

    Assert.AreEqual(1, lexer.Tokens.Count);
    Assert.AreEqual(1, lexer.Errors.Count);
    Assert.AreEqual(1, lexer.Index);
}
public void ParseExtend_NOK04()
{
    // "extend Foo x" — an unexpected token instead of "{" must fail the parse.
    const string text = " extend Foo x\r\n optional int32 bar = 126;\r\n }";
    var lexer = new Lexer(text);
    lexer.Matches = Helper.SplitText(text);

    Assert.IsFalse(lexer.ParseExtend());

    Assert.AreEqual(2, lexer.Tokens.Count);
    Assert.AreEqual(1, lexer.Errors.Count);
    Assert.AreEqual(2, lexer.Index);
}
public void ParseExtend_NOK05()
{
    // "extend Foo {" with no body/closing brace must fail.
    const string text = " extend Foo {";
    var lexer = new Lexer(text);
    lexer.Matches = Helper.SplitText(text);

    Assert.IsFalse(lexer.ParseExtend());

    Assert.AreEqual(2, lexer.Tokens.Count);
    Assert.AreEqual(1, lexer.Errors.Count);
    Assert.AreEqual(3, lexer.Index);
}
public void ParseMessage_NOK05()
{
    // "message SearchResponse x" — "x" where "{" is expected must fail early.
    const string text = " message SearchResponse x\r\n required string url = 2;\r\n optional string title = 3;\r\n repeated string snippets = 4;";
    var lexer = new Lexer(text);
    lexer.Matches = Helper.SplitText(text);

    Assert.IsFalse(lexer.ParseMessage(false));

    Assert.AreEqual(2, lexer.Tokens.Count);
    Assert.AreEqual(1, lexer.Errors.Count);
    Assert.AreEqual(2, lexer.Index);
}
public void ParseMessage_NOK04()
{
    // "message SearchResponse" with no body must fail.
    const string text = " message SearchResponse";
    var lexer = new Lexer(text);
    lexer.Matches = Helper.SplitText(text);

    Assert.IsFalse(lexer.ParseMessage(false));

    Assert.AreEqual(2, lexer.Tokens.Count);
    Assert.AreEqual(1, lexer.Errors.Count);
    Assert.AreEqual(2, lexer.Index);
}
public void ParseFieldOptions_NOK04()
{
    // "[default = 123" without the closing "]" must fail.
    const string text = " [default = 123";
    var field = new Field { FieldType = FieldType.TypeInt32, HasOption = false };
    var lexer = new Lexer(text);
    lexer.Matches = Helper.SplitText(text);

    Assert.IsFalse(lexer.ParseFieldOptions(field));

    Assert.AreEqual(2, lexer.Tokens.Count);
    Assert.AreEqual(1, lexer.Errors.Count);
    Assert.AreEqual(4, lexer.Index);
}
/// <summary>
/// Lexes the file named by args[0], prints the token stream (terminated by
/// "$"), then parses it.
/// </summary>
static void Main(string[] args)
{
    // Guard against a missing argument instead of crashing with an
    // IndexOutOfRangeException when no input file is supplied.
    if (args.Length < 1)
    {
        Console.WriteLine("Please provide input file");
        Environment.Exit(-1);
    }

    var lexer = new Lexer.Lexer(args[0]);
    var l = lexer.Parse();

    // "$" marks end-of-input for the parser.
    l.Add("$");

    foreach (var str in l)
    {
        Console.WriteLine(str);
    }

    new Parser(l).Parse();
    Console.ReadLine();
}
public void ParseFiledOption_NOK02()
{
    // A bare identifier with no "=" is incomplete; the keyword token is still
    // recorded with its position/length/classification.
    const string text = " aaa";
    var lexer = new Lexer(text);
    lexer.Matches = Helper.SplitText(text);

    Assert.IsFalse(lexer.ParseFieldOption());

    Assert.AreEqual(1, lexer.Tokens.Count);
    Assert.AreEqual(1, lexer.Errors.Count);
    Assert.AreEqual(1, lexer.Index);
    Assert.AreEqual(2, lexer.Tokens[0].Position);
    Assert.AreEqual(3, lexer.Tokens[0].Length);
    Assert.AreEqual(CodeType.Keyword, lexer.Tokens[0].CodeType);
}
public void TestLexer()
{
    // Three token classes — integers, identifiers, single whitespace chars —
    // all registered with the same priority (1).
    var tokenTypes = new[]
    {
        new TokenType("NUMBER", Regex.Parse("\\d+"), 1),
        new TokenType("ID", Regex.Parse("[a-zA-Z_][a-zA-Z0-9_]*"), 1),
        new TokenType("SPACE", Regex.Parse("\\s"), 1),
    };

    var lexer = new Lexer.Lexer(tokenTypes);

    // Smoke test: dump the tokens of a small sample input.
    foreach (var token in lexer.Read("Hallo Du 3"))
    {
        Console.WriteLine(token);
    }
}
/// <summary>
/// BASIC REPL: clears the screen, reads one line, tokenizes it and prints
/// each token's representation, then loops.
/// </summary>
/// <remarks>
/// Fixes in this revision: <c>Console.ReadLine</c> has no prompt overload
/// (the prompt is now written separately); <c>for (x in y)</c> is not valid
/// C# (replaced with <c>foreach</c>); the loop no longer assigns to its
/// iteration variable; the tokens are printed individually instead of
/// printing the array object itself; a missing semicolon was added.
/// </remarks>
static void Main(string[] args)
{
    while (true)
    {
        Console.Clear();
        Console.WriteLine("The Basic Programming Language (2021)");

        Console.Write("basic > ");
        string source = Console.ReadLine();

        Lexer.Lexer lexer = new Lexer.Lexer();
        Token.Token[] tokens = lexer.GetTokens(source);

        foreach (var rawToken in tokens)
        {
            Console.WriteLine(rawToken.Representation());
        }
    }
}
public void Package_NOK03()
{
    // "package" with no name: the keyword itself is tokenized and a single
    // one-character error is reported at the end of the line.
    const string text = " package";
    var lexer = new Lexer(text);
    lexer.Matches = Helper.SplitText(text);

    lexer.ParsePackage();

    Assert.AreEqual(1, lexer.Tokens.Count);
    Assert.AreEqual(4, lexer.Tokens[0].Position);
    Assert.AreEqual(7, lexer.Tokens[0].Length);
    Assert.AreEqual(CodeType.TopLevelCmd, lexer.Tokens[0].CodeType);
    Assert.AreEqual(0, lexer.Line);
    Assert.AreEqual(1, lexer.Errors.Count);
    Assert.AreEqual(11, lexer.Errors[0].Position);
    Assert.AreEqual(1, lexer.Errors[0].Length);
    Assert.AreEqual(0, lexer.Line);
}
public void Package_NOK02()
{
    // A non-ASCII character in the package name ("Teäst1") produces one error
    // covering the offending segment while the rest still tokenizes.
    const string text = " package Teäst1.kjhkj.jjjj;";
    var lexer = new Lexer(text);
    lexer.Matches = Helper.SplitText(text);

    lexer.ParsePackage();

    Assert.AreEqual(4, lexer.Tokens.Count);
    Assert.AreEqual(4, lexer.Tokens[0].Position);
    Assert.AreEqual(7, lexer.Tokens[0].Length);
    Assert.AreEqual(CodeType.TopLevelCmd, lexer.Tokens[0].CodeType);
    Assert.AreEqual(0, lexer.Line);
    Assert.AreEqual(1, lexer.Errors.Count);
    Assert.AreEqual(12, lexer.Errors[0].Position);
    Assert.AreEqual(6, lexer.Errors[0].Length);
    Assert.AreEqual(0, lexer.Line);
}
public void Import_NOK2()
{
    // An import whose statement is unterminated: both tokens (keyword and
    // string) are still recorded, plus one error.
    const string text = " import \"blah fasel\"";
    var lexer = new Lexer(text);
    lexer.Matches = Helper.SplitText(text);

    lexer.ParseImport();

    Assert.AreEqual(2, lexer.Tokens.Count);
    Assert.AreEqual(1, lexer.Errors.Count);
    Assert.AreEqual(2, lexer.Tokens[0].Position);
    Assert.AreEqual(6, lexer.Tokens[0].Length);
    Assert.AreEqual(CodeType.TopLevelCmd, lexer.Tokens[0].CodeType);
    Assert.AreEqual(0, lexer.Line);
    Assert.AreEqual(9, lexer.Tokens[1].Position);
    Assert.AreEqual(12, lexer.Tokens[1].Length);
    Assert.AreEqual(CodeType.String, lexer.Tokens[1].CodeType);
    Assert.AreEqual(0, lexer.Line);
}
public void Default_NOK03()
{
    // An unquoted default for a string field: the "default" keyword is
    // tokenized, the value is reported as an error.
    const string text = "default=-1234a]";
    var lexer = new Lexer(text);
    lexer.Matches = Helper.SplitText(text);
    var field = new Field { FieldType = FieldType.TypeString, HasOption = false };

    Assert.IsFalse(lexer.ParseDefault(field));

    Assert.AreEqual(1, lexer.Tokens.Count);
    Assert.AreEqual(1, lexer.Errors.Count);
    Assert.AreEqual(0, lexer.Tokens[0].Position);
    Assert.AreEqual(7, lexer.Tokens[0].Length);
    Assert.AreEqual(CodeType.Keyword, lexer.Tokens[0].CodeType);
    Assert.AreEqual(8, lexer.Errors[0].Position);
    Assert.AreEqual(6, lexer.Errors[0].Length);
    Assert.AreEqual(CodeType.Text, lexer.Errors[0].CodeType);
}
public void TestScanFloatNum()
{
    // Three float spellings: trailing dot, full form, and leading dot.
    var lexer = new Lexer.Lexer();
    var reader = new StringReader( "2. 3.14 .5" );

    foreach ( var expected in new[] { 2.0, 3.14, 0.5 } )
    {
        var token = lexer.Scan( reader );
        Assert.AreEqual( typeof (Float), token.GetType() );
        Assert.AreEqual( expected, ( (Float) token ).Value );
    }
}
// NOTE(review): despite the "_NOK" suffix, ParseDefault is expected to return
// true here — only a recoverable error is recorded alongside the tokens.
public void Default_NOK02()
{
    // A quoted default on an int32 field: parse succeeds but flags one error.
    const string text = "default=\"-1234a\"]";
    var lexer = new Lexer(text);
    lexer.Matches = Helper.SplitText(text);
    var field = new Field { FieldType = FieldType.TypeInt32, HasOption = false };

    Assert.IsTrue(lexer.ParseDefault(field));

    Assert.AreEqual(2, lexer.Tokens.Count);
    Assert.AreEqual(1, lexer.Errors.Count);
    Assert.AreEqual(0, lexer.Tokens[0].Position);
    Assert.AreEqual(7, lexer.Tokens[0].Length);
    Assert.AreEqual(CodeType.Keyword, lexer.Tokens[0].CodeType);
    Assert.AreEqual(0, lexer.Line);
    Assert.AreEqual(8, lexer.Tokens[1].Position);
    Assert.AreEqual(1, lexer.Tokens[1].Length);
    Assert.AreEqual(CodeType.Number, lexer.Tokens[1].CodeType);
    Assert.AreEqual(0, lexer.Line);
}
/// <summary>
/// Lexes a file containing "a+b+c" with a '+' recognizer and an identifier
/// recognizer, then asserts each token's code, text, length and location.
/// </summary>
/// <remarks>
/// Fix: xUnit's <c>Assert.Equal(expected, actual)</c> takes the expected
/// value FIRST — every assertion below had the arguments swapped, which made
/// failure messages report the expectation as the actual value.
/// </remarks>
void TestFirstSimpleCase()
{
    using (var source = _sourceManager.OpenFile("LexerTests\\TestSources\\first_simple_test.txt"))
    {
        ISectioner sectioner = new Sectioner.Sectioner();
        var sections = sectioner.GetSections(source);

        ILexer lexer = new Lexer.Lexer();
        lexer.AddRecognizer(new SingleCharTokenRecognizer('+', TokenEnum.Code_Plus));
        lexer.AddRecognizer(new IdTokenRecognizer());

        // NOTE(review): 'tokens' is enumerated four times below; if GetTokens
        // returns a lazily-evaluated sequence, materialize it first — confirm.
        var tokens = lexer.GetTokens(sections);

        Assert.Collection(tokens,
            x => Assert.Equal(TokenEnum.Code_Identifier, x.Code),
            x => Assert.Equal(TokenEnum.Code_Plus, x.Code),
            x => Assert.Equal(TokenEnum.Code_Identifier, x.Code),
            x => Assert.Equal(TokenEnum.Code_Plus, x.Code),
            x => Assert.Equal(TokenEnum.Code_Identifier, x.Code));
        Assert.Collection(tokens,
            x => Assert.Equal("a", x.Text),
            x => Assert.Equal("+", x.Text),
            x => Assert.Equal("b", x.Text),
            x => Assert.Equal("+", x.Text),
            x => Assert.Equal("c", x.Text));
        Assert.Collection(tokens,
            x => Assert.Equal(1, x.Length),
            x => Assert.Equal(1, x.Length),
            x => Assert.Equal(1, x.Length),
            x => Assert.Equal(1, x.Length),
            x => Assert.Equal(1, x.Length));
        Assert.Collection(tokens,
            x => Assert.Equal("first_simple_test.txt: (1, 1)", x.Location.GetLocationString()),
            x => Assert.Equal("first_simple_test.txt: (1, 2)", x.Location.GetLocationString()),
            x => Assert.Equal("first_simple_test.txt: (1, 3)", x.Location.GetLocationString()),
            x => Assert.Equal("first_simple_test.txt: (1, 4)", x.Location.GetLocationString()),
            x => Assert.Equal("first_simple_test.txt: (1, 5)", x.Location.GetLocationString()));
    }
}
/// <summary>
/// Happy path: a complete "extend" block containing one optional field parses
/// without errors and yields seven tokens.
/// </summary>
public void ParseExtend_OK02()
{
    const string Text = "extend Package1.Foo { \r\n optional string bar = 100;\r\n }";
    var lex = new Lexer(Text) { Matches = Helper.SplitText(Text) };

    Assert.IsTrue(lex.ParseExtend());

    // Tokens: extend / Package1 / Foo / optional / string / bar / 100
    // (lengths and start offsets below match those substrings of Text).
    Assert.AreEqual(7, lex.Tokens.Count);
    Assert.AreEqual(0, lex.Errors.Count);
    Assert.AreEqual(14, lex.Index);

    // Start offsets within Text.
    Assert.AreEqual(0, lex.Tokens[0].Position);
    Assert.AreEqual(7, lex.Tokens[1].Position);
    Assert.AreEqual(16, lex.Tokens[2].Position);
    Assert.AreEqual(28, lex.Tokens[3].Position);
    Assert.AreEqual(37, lex.Tokens[4].Position);
    Assert.AreEqual(44, lex.Tokens[5].Position);
    Assert.AreEqual(50, lex.Tokens[6].Position);

    // Token lengths.
    Assert.AreEqual(6, lex.Tokens[0].Length);
    Assert.AreEqual(8, lex.Tokens[1].Length);
    Assert.AreEqual(3, lex.Tokens[2].Length);
    Assert.AreEqual(8, lex.Tokens[3].Length);
    Assert.AreEqual(6, lex.Tokens[4].Length);
    Assert.AreEqual(3, lex.Tokens[5].Length);
    Assert.AreEqual(3, lex.Tokens[6].Length);

    // Classification of each token.
    Assert.AreEqual(CodeType.TopLevelCmd, lex.Tokens[0].CodeType);
    Assert.AreEqual(CodeType.SymRef, lex.Tokens[1].CodeType);
    Assert.AreEqual(CodeType.SymRef, lex.Tokens[2].CodeType);
    Assert.AreEqual(CodeType.FieldRule, lex.Tokens[3].CodeType);
    Assert.AreEqual(CodeType.Keyword, lex.Tokens[4].CodeType);
    Assert.AreEqual(CodeType.SymDef, lex.Tokens[5].CodeType);
    Assert.AreEqual(CodeType.Number, lex.Tokens[6].CodeType);
}
/// <summary>
/// Compiler driver: lexes the input file into token/error listings, then
/// parses, dumps the AST (DOT), builds symbol tables and runs the semantic
/// checker, writing each artifact to its own output file next to the input.
/// </summary>
/// <param name="args">args[0] is the input file name, resolved relative to the current directory.</param>
static void Main(string[] args)
{
    // Exactly one argument — the input file — is required.
    if (args.Length != 1)
    {
        Console.WriteLine("Please provide input file");
        Environment.Exit(-1);
    }

    string filePath = $@"{Environment.CurrentDirectory}\{args[0]}";
    string fileName = Path.GetFileNameWithoutExtension(filePath);
    string fileDirectory = Path.GetDirectoryName(filePath);
    string inputText = File.ReadAllText(filePath);

    Lexer.Lexer lex = new Lexer.Lexer(inputText);
    List<Token> tokensToParse = new List<Token>();

    // Phase 1: tokenize. Valid tokens go to .outlextokens (and to the
    // parser's input list); error tokens go to .outlexerrors.
    using (StreamWriter tokenFile = new StreamWriter($@"{fileDirectory}\{fileName}.outlextokens"))
    using (StreamWriter tokenErrorFile = new StreamWriter($@"{fileDirectory}\{fileName}.outlexerrors"))
    {
        Token t;
        do
        {
            t = lex.GetNextToken();
            Console.WriteLine(t.ToString());
            if (lex.IsErrorToken(t.TokenType))
            {
                tokenErrorFile.WriteLine(t.ToString());
            }
            else
            {
                tokenFile.WriteLine(t.ToString());
                tokensToParse.Add(t);
            }
        } while (t.TokenType != TokenType.EOF);

        // Comments are lexed (and written above) but never parsed.
        tokensToParse.RemoveAll(x => lex.IsCommentToken(x.TokenType));
    }

    // Phase 2: parse and analyze; one output stream per artifact.
    using (StreamWriter astStream = new StreamWriter($@"{fileDirectory}\{fileName}.outast"))
    using (StreamWriter derivationsStream = new StreamWriter($@"{fileDirectory}\{fileName}.outderivation"))
    using (StreamWriter syntaxErrorStream = new StreamWriter($@"{fileDirectory}\{fileName}.outsyntaxerrors"))
    using (StreamWriter symbolTablesStream = new StreamWriter($@"{fileDirectory}\{fileName}.outsymboltables"))
    using (StreamWriter semanticErrorStream = new StreamWriter($@"{fileDirectory}\{fileName}.outsemanticerrors"))
    {
        // Do parsing
        Parser.Parser parser = new Parser.Parser(tokensToParse, syntaxErrorStream, derivationsStream, astStream);
        Console.WriteLine(parser.Parse());
        var tree = parser.GetASTTree();
        derivationsStream.Flush();
        syntaxErrorStream.Flush();

        // Dump the AST in DOT format.
        var printVisitor = new DOTPrinterVisitor(astStream);
        tree.Accept(printVisitor);
        astStream.Flush();

        // Build symbol tables; semantic errors are reported on the fly.
        var symbolTableVisitor = new SymbolTableVisitor(semanticErrorStream);
        tree.Accept(symbolTableVisitor);
        symbolTablesStream.WriteLine(symbolTableVisitor.GlobalSymbolTable);
        Console.WriteLine(symbolTableVisitor.GlobalSymbolTable);

        // Semantic checks against the completed symbol table.
        var semanticCheckerVisitor = new SemanticCheckerVisitor(semanticErrorStream, symbolTableVisitor.GlobalSymbolTable);
        tree.Accept(semanticCheckerVisitor);
    }
}
/// <summary>
/// Console driver: loads the handler's source code, runs the server-side
/// semantic validation, then dumps every variable value held on the context
/// stack to the console.
/// </summary>
static void Main(string[] args)
{
    // Load the source the same way the HTTP handler does.
    HandlerFiles file = new HandlerFiles();
    var code = file.GetCode();

    var lex = new Lexer.Lexer(new SourceCode(code));
    var parser = new Parser(lex);
    parser.ValidateSemanticServer();
    // (Removed a large block of commented-out experiments here: full
    // parse/interpret of the root and a manual "operar" invocation with
    // parameter marshalling — the live version of that flow is in the HTTP
    // handler's ProcessRequest.)

    // NOTE(review): 'stack' is never used below — it looks like a leftover
    // from the removed experiments; confirm before deleting.
    var stack = StackContext.Context.Stack.Peek();

    // Dump every stored value from every table on the stack.
    foreach (var table in StackContext.Context.Stack)
    {
        foreach (var var in table.Values)
        {
            dynamic va = var.Value;
            Console.WriteLine($"Valor de : {var.Key.ToUpper()} es : { va.Value} ");
        }
    }

    Console.ReadKey();
}
public void TestScanWord()
{
    // An identifier containing a digit ("abc2") scans as one Word token.
    var lexer = new Lexer.Lexer();
    var reader = new StringReader( "abc2" );

    var token = lexer.Scan( reader );

    Assert.AreEqual( typeof( Word ), token.GetType() );
    Assert.AreEqual( "abc2", ( (Word)token ).Lexeme );
}
/// <summary>
/// HTTP entry point: writes the pre-parsed HTML prefix, then — when the
/// request carries form fields — rewrites the embedded program so the
/// "operar" call and the "a[5]" array literal contain the posted values,
/// re-parses and interprets it, invokes "operar" manually with the posted
/// parameters, and renders a long HTML report of the interpreted variables.
/// Any exception is reported inline and the HTML document is still closed.
/// </summary>
/// <param name="context">Request/response context supplied by ASP.NET.</param>
public void ProcessRequest(HttpContext context)
{
    // Emit the literal HTML that precedes the embedded program, if any.
    if (Parser.CurrentToken.TokenType == TokenType.HTMLContent)
    {
        context.Response.Write(Parser.CurrentToken.Lexeme);
    }

    var parameters = new List<string>();
    try
    {
        string[] keys = context.Request.Form.AllKeys;
        if (keys.Length > 0)
        {
            // Reset every table on the (static) stack so the previous
            // request's values do not leak into this execution.
            foreach (var table in StackContext.Context.Stack)
            {
                table.Values = new Dictionary<string, Value>();
                table.ValuesOfArrays = new Dictionary<string, List<Value>>();
                table.Variables = new Dictionary<string, TypesTable.Variable>();
                table.ValuesofStructInstances = new Dictionary<string, List<Tuple<string, Value>>>();
            }
            // (Removed several commented-out per-table removals of the
            // "operar"/"respuesta" entries superseded by the reset above.)

            StackContext.Context.FunctionsNodes.Remove("operar");

            string arrayparams = "";
            foreach (var key in HttpContext.Current.Request.Form.AllKeys)
            {
                var value = HttpContext.Current.Request.Form[key];
                // Non-numeric form values are wrapped as string literals.
                int number;
                var result = int.TryParse(value, out number);
                if (!result)
                {
                    value = "\"" + value + "\"";
                }
                // "elementsofarray" feeds the array literal; every other
                // field becomes an "operar" argument.
                if (key != "elementsofarray")
                {
                    parameters.Add(value);
                }
                if (key == "elementsofarray")
                {
                    arrayparams = value.Replace("\"", "");;
                }
            }

            // Splice the posted arguments into the source's placeholder call.
            string csvString = string.Join(",", parameters);
            string newValue = "int respuesta = operar(" + csvString + ");";
            context.Response.Write(newValue);
            var replace2 = "";
            if (Lex.SourceCode._sourceCode.Contains("int respuesta = operar();"))
            {
                replace2 = Lex.SourceCode._sourceCode.Replace("int respuesta = operar();", newValue);
            }

            // Splice the posted array elements into the placeholder array.
            string newValue2 = "int a[5] = {" + arrayparams + "};";
            context.Response.Write(newValue2);
            if (Lex.SourceCode._sourceCode.Contains("int a[5] = {};"))
            {
                // NOTE(review): if the first placeholder was absent, replace2
                // is still "" here and the original source text is lost —
                // confirm both placeholders always coexist.
                replace2 = replace2.Replace("int a[5] = {};", newValue2);
            }

            // Re-lex, re-parse, validate and interpret the rewritten source.
            Lex = new Lexer.Lexer(new SourceCode(replace2));
            Parser = new Parser(Lex);
            var root = Parser.Parse();
            ValidateSemantic(root);
            Interpret(root);

            // Invoke "operar" manually: restore its captured context, bind the
            // posted parameters by declared type, then execute.
            var functiondeclaration = StackContext.Context.FunctionsNodes["operar"];
            StackContext.Context.Stack.Push(StackContext.Context.PastContexts[functiondeclaration.CodeGuid]);
            var pos = 0;
            foreach (var parameter in functiondeclaration.Parameters)
            {
                var typeOfParameter = StackContext.Context.Stack.Peek()
                    .GetVariable(parameter.NameOfVariable.Value, functiondeclaration.Position);
                dynamic value = null;
                if (typeOfParameter is StringType)
                {
                    value = new StringValue { Value = parameters[pos] };
                }
                else if (typeOfParameter is IntType)
                {
                    value = new IntValue { Value = Convert.ToInt32(parameters[pos]) };
                }
                StackContext.Context.Stack.Peek().SetVariableValue(parameter.NameOfVariable.Value, value);
                pos++;
            }
            functiondeclaration.Execute();

            // The function's result is stored under "<name>ResponseForServer".
            dynamic var1 = StackContext.Context.Stack.Peek().GetVariableValue(functiondeclaration.Identifier.NameOfVariable.Value + "ResponseForServer");
            context.Response.Write($"<h3>\r\nName : {functiondeclaration.Identifier.NameOfVariable.Value + "ResponseForServer"} </h3> ");
            context.Response.Write($"<h3>\r\nResponse Operation : {var1.Value} </h3> ");
            StackContext.Context.Stack.Pop();

            // ---- Report sections: each block reads one well-known variable
            // from the interpreted program and renders it as HTML. ----
            context.Response.Write("<h4>--------------------------------------------------------</h4> ");
            context.Response.Write(" FUNCTION CALL AND ARRAY");
            context.Response.Write("<h4>--------------------------------------------------------</h4> ");
            dynamic var23 = StackContext.Context.Stack.Peek().GetVariableValue("sum");
            context.Response.Write($"<h5><i>\r\n Suma de valores de arreglo: {var23.Value} </i></h5> ");
            context.Response.Write("<h4>--------------------------------------------------------</h4> ");
            context.Response.Write("<h4> TIPOS DE DATOS - ASIGNACIONES</h4> ");
            context.Response.Write("<h4>--------------------------------------------------------</h4> ");
            dynamic var3 = StackContext.Context.Stack.Peek().GetVariableValue("normalizationFactor");
            // NOTE(review): "r\n" below (not "\r\n") is in the original
            // output string; left untouched since it is runtime text.
            context.Response.Write($"<h5><i>r\nNormalizationFactor (float) : {var3.Value} </i></h5> ");
            dynamic var4 = StackContext.Context.Stack.Peek().GetVariableValue("uno");
            context.Response.Write($"<h5><i>\r\nUno (hexadecimal) : {var4.Value} </i></h5> ");
            dynamic var5 = StackContext.Context.Stack.Peek().GetVariableValue("dos");
            context.Response.Write($"<h5><i>\r\nDos (binario) : {var5.Value} </i></h5>");
            dynamic var6 = StackContext.Context.Stack.Peek().GetVariableValue("hola");
            context.Response.Write($"<h5><i>\r\nHola (string) : {var6.Value} </i></h5> ");
            dynamic var7 = StackContext.Context.Stack.Peek().GetVariableValue("fecha");
            context.Response.Write($"<h5><i>\r\nFecha (date) : {var7.Value} </i></h5> ");
            context.Response.Write("<h4>--------------------------------------------------------</h4> ");
            context.Response.Write("<h4> POINTERS </h4> ");
            context.Response.Write("<h4>--------------------------------------------------------</h4> ");
            // (Removed commented-out dumps of pointer variables p1..p4.)
            dynamic var12 = StackContext.Context.Stack.Peek().GetVariableValue("pc");
            context.Response.Write($"<h5><i>\r\n Pointer PC: {var12.Value} </i></h5>");
            dynamic var13 = StackContext.Context.Stack.Peek().GetVariableValue("c");
            context.Response.Write($"<h5><i>\r\n Pointer C : {var13.Value} </i></h5>");
            context.Response.Write("<h4>--------------------------------------------------------</h4> ");
            context.Response.Write("<h4> STRUCTS </h4> ");
            context.Response.Write("<h4>--------------------------------------------------------</h4> ");
            dynamic var14 = StackContext.Context.Stack.Peek().GetVariableValue("nombreantes");
            context.Response.Write($"<h5><i>\r\n Nombre antes : {var14.Value} </i></h5>");
            dynamic var15 = StackContext.Context.Stack.Peek().GetVariableValue("nombre");
            context.Response.Write($"<h5><i>\r\n Nombre : {var15.Value} </i></h5>");
            context.Response.Write("<h4>--------------------------------------------------------</h4> ");
            context.Response.Write(" ENUMS ");
            context.Response.Write("<h4>--------------------------------------------------------</h4> ");
            dynamic var16 = StackContext.Context.Stack.Peek().GetVariableValue("sec_level");
            // NOTE(review): the closing tag below is "</h3>" for an "<h5>"
            // element in the original markup; left untouched (runtime text).
            context.Response.Write($"<h5><i>\r\n sec_level : {var16.Value} </h3> ");
            dynamic var17 = StackContext.Context.Stack.Peek().GetVariableValue("my_security_level");
            context.Response.Write($"<h5><i>\r\n my_security_level : {var17.Value} </i></h5> ");
            context.Response.Write("<h4>--------------------------------------------------------</h4> ");
            context.Response.Write(" ARRAYS ");
            context.Response.Write("<h4>--------------------------------------------------------</h4> ");
            dynamic var18 = StackContext.Context.Stack.Peek().GetVariableValue("azantes");
            context.Response.Write($"<h5><i>\r\n Bidimensional arr antes: {var18.Value} </i></h5> ");
            dynamic var19 = StackContext.Context.Stack.Peek().GetVariableValue("azdespues");
            context.Response.Write($"<h5><i>\r\n Bidimensional arr antes: {var19.Value} </i></h5> ");
            dynamic var20 = StackContext.Context.Stack.Peek().GetVariableValue("mark1pos");
            context.Response.Write($"<h5><i>\r\n Unidimensional arr : {var20.Value} </i></h5> ");
            context.Response.Write("<h4>--------------------------------------------------------</h4> ");
            context.Response.Write(" FUNCTION REFERENCE CALL ");
            context.Response.Write("<h4>--------------------------------------------------------</h4> ");
            dynamic var21 = StackContext.Context.Stack.Peek().GetVariableValue("valuereference");
            context.Response.Write($"<h5><i>\r\n Variable pasada con valor 20 : {var21.Value} </i></h5> ");
            dynamic var22 = StackContext.Context.Stack.Peek().GetVariableValue("valuereferenceresponse");
            context.Response.Write($"<h5><i>\r\n Valor de retorno de funcion : {var22.Value} </i></h5> ");
            context.Response.Write("<h4>--------------------------------------------------------</h4> ");
            context.Response.Write(" TODOS LOS VALORES ");
            context.Response.Write("<h4>--------------------------------------------------------</h4> ");

            // Final catch-all dump of every stored value on the stack.
            foreach (var table in StackContext.Context.Stack)
            {
                foreach (var var in table.Values)
                {
                    dynamic va = var.Value;
                    context.Response.Write($"<h5><i>\r\n Valor de : {var.Key.ToUpper()} es : { va.Value}</i></h5> ");
                }
            }

            // Clear the tables again so state does not persist past this request.
            foreach (var table in StackContext.Context.Stack)
            {
                table.Values = new Dictionary<string, Value>();
                table.ValuesOfArrays = new Dictionary<string, List<Value>>();
                table.Variables = new Dictionary<string, TypesTable.Variable>();
                table.ValuesofStructInstances = new Dictionary<string, List<Tuple<string, Value>>>();
            }
        }

        // Close the HTML document.
        context.Response.Write("%>" + "\r\n\r\n" + "</div>" + "\r\n" + "</body>\r\n" + "</html>");
    }
    catch (Exception exception)
    {
        // Report the failure inline but still close the HTML document.
        context.Response.Write($"<h3>{exception.Message} </h3>");
        context.Response.Write("%>" + "\r\n\r\n" + "</div>" + "\r\n" + "</body>\r\n" + "</html>");
    }
}
/// <summary>
/// Trigger completion at the caret, unless the caret sits after a "//" on the
/// current line or inside a comment token found by the lexer.
/// </summary>
private void TriggerCompletion()
{
    // The caret must be in a non-projection location.
    var caretPoint = textView.Caret.Position.Point.GetPoint(textBuffer => (!textBuffer.ContentType.IsOfType("projection")), PositionAffinity.Predecessor);
    if (!caretPoint.HasValue)
    {
        return;
    }

    var cp = caretPoint.Value;
    var position = cp.Position;

    // Look only at the text left of the caret on the current line.
    var line = cp.Snapshot.GetLineFromPosition(position);
    var text = line.GetText();
    text = text.Substring(0, position - line.Start.Position);

    // If a "//" comment starts before the caret, don't auto-complete.
    if (text.IndexOf("//", StringComparison.CurrentCultureIgnoreCase) != -1)
    {
        return;
    }

    // Also bail out when the caret lies inside any comment token found by a
    // comments-only pass over the whole buffer.
    var lex = new Lexer(cp.Snapshot.GetText());
    lex.AnalyzeForCommentsOnly();
    var res = lex.Tokens.FirstOrDefault(x => x.Position < cp.Position && x.Position + x.Length >= cp.Position);
    if (res != null)
    {
        return;
    }

    session = provider.CompletionBroker.CreateCompletionSession(
        textView,
        cp.Snapshot.CreateTrackingPoint(cp.Position, PointTrackingMode.Positive),
        true);

    // Subscribe to the Dismissed event on the session.
    session.Dismissed += OnSessionDismissed;
    session.Start();
}
/// <summary>
/// Compiler driver.  Lexes, parses, semantically checks and generates moon
/// code for the single source file named on the command line.  Intermediate
/// results are written next to the input file as .outlextokens,
/// .outlexerrors, .outast, .outderivation, .outsyntaxerrors,
/// .outsymboltables, .outsemanticerrors and .moon files.
/// </summary>
/// <param name="args">Exactly one element: the path of the source file.</param>
static void Main(string[] args)
{
    if (args.Length != 1)
    {
        Console.WriteLine("Please provide input file");
        Environment.Exit(-1);
    }

    // BUG FIX: the previous $@"{Environment.CurrentDirectory}\{args[0]}"
    // concatenation produced an invalid path whenever an absolute path was
    // passed, and hard-coded the '\' separator.  Path.GetFullPath resolves
    // relative arguments against the current directory and handles both.
    string filePath = Path.GetFullPath(args[0]);
    string fileName = Path.GetFileNameWithoutExtension(filePath);
    string fileDirectory = Path.GetDirectoryName(filePath);
    string inputText = File.ReadAllText(filePath);

    Lexer.Lexer lex = new Lexer.Lexer(inputText);
    List<Token> tokensToParse = new List<Token>();

    // Lexing pass: valid tokens go to .outlextokens and the parse list,
    // error tokens go to .outlexerrors.
    using (StreamWriter tokenFile = new StreamWriter(Path.Combine(fileDirectory, $"{fileName}.outlextokens")))
    using (StreamWriter tokenErrorFile = new StreamWriter(Path.Combine(fileDirectory, $"{fileName}.outlexerrors")))
    {
        Token t;
        do
        {
            t = lex.GetNextToken();
            if (lex.IsErrorToken(t.TokenType))
            {
                tokenErrorFile.WriteLine(t.ToString());
                Console.WriteLine($"LexError: {t.ToString()}");
            }
            else
            {
                tokenFile.WriteLine(t.ToString());
                tokensToParse.Add(t);
            }
        } while (t.TokenType != TokenType.EOF);

        // Comments are not part of the grammar; drop them before parsing.
        tokensToParse.RemoveAll(x => lex.IsCommentToken(x.TokenType));
        Console.WriteLine("INFO: Lexing completed.");
    }

    using (StreamWriter astStream = new StreamWriter(Path.Combine(fileDirectory, $"{fileName}.outast")))
    using (StreamWriter derivationsStream = new StreamWriter(Path.Combine(fileDirectory, $"{fileName}.outderivation")))
    using (StreamWriter syntaxErrorStream = new StreamWriter(Path.Combine(fileDirectory, $"{fileName}.outsyntaxerrors")))
    using (StreamWriter symbolTablesStream = new StreamWriter(Path.Combine(fileDirectory, $"{fileName}.outsymboltables")))
    using (StreamWriter semanticErrorStream = new StreamWriter(Path.Combine(fileDirectory, $"{fileName}.outsemanticerrors")))
    using (StreamWriter codeGenOutput = new StreamWriter(Path.Combine(fileDirectory, $"{fileName}.moon")))
    {
        // Parsing.
        Parser.Parser parser = new Parser.Parser(tokensToParse, syntaxErrorStream, derivationsStream, astStream);
        Console.WriteLine(parser.Parse() ? "Parsing passed" : "Error: Parsing Failed");

        // AST dump.
        var tree = parser.GetASTTree();
        var printVisitor = new DOTPrinterVisitor(astStream);
        tree.Accept(printVisitor);
        astStream.Flush();
        Console.WriteLine("INFO: AST Tree dumped to outast");

        // Symbol table construction.
        var symbolTableVisitor = new SymbolTableVisitor(semanticErrorStream);
        tree.Accept(symbolTableVisitor);
        Console.WriteLine("INFO: SymbolTable Generated");

        // Semantic checking.
        var semanticCheckerVisitor = new SemanticCheckerVisitor(semanticErrorStream, symbolTableVisitor.GlobalSymbolTable);
        tree.Accept(semanticCheckerVisitor);
        Console.WriteLine("INFO: Semantic Checking Complete");

        // Flush before inspecting stream lengths, otherwise buffered error
        // text would not be counted.
        syntaxErrorStream.Flush();
        semanticErrorStream.Flush();
        bool hasErrors = semanticErrorStream.BaseStream.Length != 0 || syntaxErrorStream.BaseStream.Length != 0;
        if (hasErrors)
        {
            Console.WriteLine("Errors generated during parsing/semantic checking, terminating...");
            Console.ReadKey();
            Environment.Exit(-10);
        }

        // Code generation; moon expects Unix line endings.
        codeGenOutput.NewLine = "\n";
        var codeWriter = new CodeWriter(codeGenOutput);
        var codeGen = new CodeGen.CodeGen(tree, symbolTableVisitor.GlobalSymbolTable, codeWriter);
        codeGen.GenerateCode();
        symbolTablesStream.WriteLine(symbolTableVisitor.GlobalSymbolTable);
        Console.WriteLine("INFO: Code Generated");
        Console.ReadKey();
    }
}
/// <summary>
/// A /* ... */ block spanning several lines (and containing a lone '*') is
/// scanned as one Comment token whose Value is the text between the
/// delimiters, with the delimiters themselves stripped.
/// </summary>
public void TestScanMultilineComment()
{
    const string textComment = "Comment line 1\n" + "Comment line 2\n" + "Comment line *";

    var lexer = new Lexer.Lexer();
    var input = new StringReader("/*" + textComment + "*/");

    var token = lexer.Scan(input);

    Assert.AreEqual(typeof(Comment), token.GetType());
    Assert.AreEqual(textComment, ((Comment)token).Value);
}
/// <summary>
/// Creates a parser that pulls its tokens from the supplied lexer.
/// </summary>
/// <param name="lexer">Token source; stored as-is, not copied.</param>
public Parser(Lexer.Lexer lexer) => this.lexer = lexer;
/// <summary>
/// A field option whose closing bracket is replaced by ']' without a
/// matching opener must fail: one token and one error are recorded, and the
/// reader stops at index 2.
/// </summary>
public void ParseFiledOption_NOK06()
{
    const string text = " (aaa=abcdefgh];"; // closing bracket is missing
    var lexer = new Lexer(text) { Matches = Helper.SplitText(text) };

    bool parsed = lexer.ParseFieldOption();

    Assert.IsFalse(parsed);
    Assert.AreEqual(1, lexer.Tokens.Count);
    Assert.AreEqual(1, lexer.Errors.Count);
    Assert.AreEqual(2, lexer.Index);

    // the single token is the option name "aaa" at offset 3, length 3
    Assert.AreEqual(3, lexer.Tokens[0].Position);
    Assert.AreEqual(3, lexer.Tokens[0].Length);
    Assert.AreEqual(CodeType.Keyword, lexer.Tokens[0].CodeType);
}
/// <summary>
/// An extensions statement with two comma-separated ranges ("1000 to 1999,
/// 6000 to max") parses cleanly into seven tokens with no errors, leaving
/// the reader at index 9.
/// </summary>
public void MessageField_OK04()
{
    const string text = " extensions 1000 to 1999, 6000 to max;";
    var lexer = new Lexer(text) { Matches = Helper.SplitText(text) };

    bool parsed = lexer.ParseMessageExtensions();

    Assert.IsTrue(parsed);
    Assert.AreEqual(7, lexer.Tokens.Count);
    Assert.AreEqual(0, lexer.Errors.Count);
    Assert.AreEqual(9, lexer.Index);
}
/// <summary>
/// A parenthesized custom option "(aaa)=abcdefgh]" parses successfully into
/// two keyword tokens — the option name and its value — with no errors.
/// </summary>
public void ParseFiledOption_OK05()
{
    const string text = " (aaa)=abcdefgh];";
    var lexer = new Lexer(text) { Matches = Helper.SplitText(text) };

    bool parsed = lexer.ParseFieldOption();

    Assert.IsTrue(parsed);
    Assert.AreEqual(2, lexer.Tokens.Count);
    Assert.AreEqual(0, lexer.Errors.Count);
    Assert.AreEqual(4, lexer.Index);

    // token 0: option name "aaa" at offset 3, length 3
    Assert.AreEqual(3, lexer.Tokens[0].Position);
    Assert.AreEqual(3, lexer.Tokens[0].Length);
    Assert.AreEqual(CodeType.Keyword, lexer.Tokens[0].CodeType);

    // token 1: option value "abcdefgh" at offset 8, length 8
    Assert.AreEqual(8, lexer.Tokens[1].Position);
    Assert.AreEqual(8, lexer.Tokens[1].Length);
    Assert.AreEqual(CodeType.Keyword, lexer.Tokens[1].CodeType);
}
/// <summary>
/// An extensions range given in hexadecimal ("0x100c to 0x199c") parses
/// cleanly into four tokens with no errors, leaving the reader at index 5.
/// </summary>
public void MessageField_OK05()
{
    const string text = " extensions 0x100c to 0x199c;";
    var lexer = new Lexer(text) { Matches = Helper.SplitText(text) };

    bool parsed = lexer.ParseMessageExtensions();

    Assert.IsTrue(parsed);
    Assert.AreEqual(4, lexer.Tokens.Count);
    Assert.AreEqual(0, lexer.Errors.Count);
    Assert.AreEqual(5, lexer.Index);
}
/// <summary>
/// Creates a parser over the given program text by wrapping it in a fresh
/// lexer.
/// </summary>
/// <param name="prg">Source text of the program to parse.</param>
public Parser(string prg) => lexer = new Lexer.Lexer(prg);
/// <summary>
/// A run of digits is scanned as a single Num token carrying its integer
/// value.
/// </summary>
public void TestScanNum()
{
    var lexer = new Lexer.Lexer();
    var input = new StringReader("1234567890");

    var token = lexer.Scan(input);

    Assert.AreEqual(typeof(Num), token.GetType());
    Assert.AreEqual(1234567890, ((Num)token).Value);
}