public void TestExpressionFunctionReturnTypeBinding() { const string src = "namespace MyNamespace" + "{" + " class MyClass" + " {" + " func Add(int a, int b) -> return new MyClass2()" + " }" + "" + " class MyClass2" + " {" + " " + " }" + "}"; var lexer = new Lexer.Lexer(); var tokens = lexer.Lex(src); var parser = new Parser.Parser(tokens); var ast = parser.Parse(); var binder = new Binder(); var semanticModel = binder.Bind(new List<CompilationUnitSyntax> { ast }).Single(); var boundNamespace = semanticModel.Namespaces.Single(x => x.Name == "MyNamespace"); var expectedFunctionReturnType = boundNamespace.Types.Single(x => x.Name == "MyClass2"); var function = boundNamespace.Types.Single(x => x.Name == "MyClass").Functions.Single(x => x.Name == "Add"); Assert.AreSame(expectedFunctionReturnType, function.ReturnType); }
private void goButton_Click(object sender, RoutedEventArgs e) { Parser.Parser.Variable variableI; Parser.Parser parser = new Parser.Parser(); parser.InputString = functionTextBox.Text; variableI = parser.GetVariable("x"); PointCollection p = new PointCollection(); Random r = new Random(); for (float i = 0; i <= 10; i += 0.5f) { points.pointX.Add(i); variableI.value = i; clearSignal.Add(variableI.value); //variableI.value = (i % 3 == 0) ? i * (float)r.NextDouble() : i; //noiseSignal.Add(variableI.value); var y = parser.Calculate(); points.pointY.Add(y); p.Add(new Point(y, i)); } chart.DataContext = p; }
public void TestParser() { var p = new Parser.Parser(); Assert.AreEqual(p.RemoveComments("lol;//Comment"), "lol;"); Assert.AreEqual(p.RemoveComments("lol;//Comment\nlol;//Comment 2\nlol/*Alex Sabala*/;"), "lol;\nlol;\nlol;"); Assert.AreEqual(p.Parse("lol;\nlol;").Count(), 4); }
public static Parser.Parser ParseCore(this string expression, ExprBuilder builder) { Scanner scanner = new Scanner(expression); Parser.Parser parser = new Parser.Parser(scanner, builder); parser.Parse(); return parser; }
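// Hedged usage sketch for the ParseCore extension above (not taken from the original source):
// it assumes ExprBuilder has a parameterless constructor and that "1 + 2 * 3" is valid input
// for this grammar; both are illustrative assumptions only.
ExprBuilder builder = new ExprBuilder();
Parser.Parser parser = "1 + 2 * 3".ParseCore(builder);
// The returned parser (and the builder it populated) can then be inspected by the caller.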
public override string[] Build() { var options = new ParserOption(); var currentAssembly = GetType().Assembly; var name = new FileInfo(currentAssembly.Location).Name; var namespaces = currentAssembly.GetTypes().Select(t => t.Namespace).ToArray(); options.Namesapces.AddRange(namespaces); options.References.Add("System.dll"); options.References.Add(@".\" + name); options.SourceCode = Parameters.Template; options.VariableCollection.Add("Namespace", string.IsNullOrEmpty(Parameters.Namespace) ? "" : Parameters.Namespace); var tmp = new List<string>(); foreach (Table table in Parameters.DataBase.Tables) { options.VariableParameter = table; options.StatementParameters = new object[] {Parameters.DataBase, table}; var parser = new Parser.Parser(options); string temp = parser.Parse(ParserType.XCODER); string fileName = Path.Combine(Parameters.OutputDirectory.FullName, table.Name) + Extension; File.WriteAllText(fileName, temp); tmp.Add(fileName); } return tmp.ToArray(); }
public IncludeNode(ITokenReader tokenReader, INodeReader nodeReader, string directiveName = null, Location location = null) : base(directiveName, location) { Filename = ExpressionRewriter.Rewrite(tokenReader); this.parentParser = nodeReader as Parser.Parser; }
public static HassiumModule CompileModuleFromSource(string source) { var tokens = new Lexer().Scan(source); var ast = new Parser.Parser().Parse(tokens); var table = new SemanticAnalyzer().Analyze(ast); return new Compiler().Compile(ast, table); }
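// Hedged usage sketch for CompileModuleFromSource above (illustrative only): the Hassium
// source text and what is later done with the module are assumptions, not part of this code.
string source = "func main () { println (\"hello\"); }";
HassiumModule module = CompileModuleFromSource(source);
// The returned module is what a host VM or runner would subsequently load and execute.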
private Document Parse(string text) { Parser.Parser parsr = new Parser.Parser(); Document document; parsr.Parse(text, out document); return document; }
public Evaluator(Parser.Parser parser, Environment environmentExtensions, TextWriter stdOut, TextReader stdIn) { this.parser = parser; applier = new Applier(this); backqouteExpander = new BackquoteExpander(this); InitializeGlobalEnvironment(environmentExtensions, stdOut, stdIn); ReadCoreLanguageCode(); }
public PackageViewModel(ObservableCollection<string> packages, XElement document) { var parser = new Parser.Parser(document); Packages = new Dictionary<string, ObservableCollection<string>>(); foreach (var package in packages) { Packages.Add(package, new ObservableCollection<string>(parser.ExtractPackageForms(package))); } }
internal Document LoadDocument(Stream template) { Parser.Parser parser = new Parser.Parser(); Document document; if (parser.Parse(template, out document)) { return document; } throw new Exception("Unable to parse: " + parser.ErrorString); }
internal static Document LoadDocument(string template) { Parser.Parser parser = new Parser.Parser(); Document document; if (parser.Parse(template, out document)) { return document; } throw new Exception("Unable to parse: " + parser.ErrorString); }
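// Hedged usage sketch for the LoadDocument helper above (not from the original source):
// the argument is a placeholder for real template text, whose concrete syntax is not shown
// here; on failure the helper throws with parser.ErrorString included in the message.
Document document = LoadDocument("...template text...");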
protected string Render(string parrot, object model, IHost host) { Parser.Parser parser = new Parser.Parser(host); Document document; parser.Parse(parrot, out document); DocumentRenderer renderer = new DocumentRenderer(new MemoryHost()); return renderer.Render(document, model); }
private void ParseIntoDictionary(Dictionary<string, string> keyVals, string fragmentVal) { Parser.Parser p = new Parser.Parser(); while (fragmentVal.Length > 0 && p.Validate(fragmentVal) && p.IsAtomic(fragmentVal)) { string firstKey = p.GetFirstKey(fragmentVal); keyVals[firstKey] = p.Extract(firstKey, fragmentVal); fragmentVal = p.TrimKeyVal(firstKey, fragmentVal); } }
public void TestClass() { var lexer = new Lexer.Lexer(); var tokens = lexer.Lex( string.Format(NamespaceSource, string.Format(ClassSource, string.Empty, string.Empty, string.Empty))); var parser = new Parser.Parser(tokens); var ast = parser.Parse(); Assert.IsInstanceOf<CompilationUnitSyntax>(ast); Assert.IsNotEmpty(ast.Namespaces); Assert.IsNotEmpty(ast.Namespaces[0].Classes); Assert.AreEqual("ClassSample", ast.Namespaces[0].Classes[0].Name.Value); Assert.AreEqual("DSampleProtocol", ast.Namespaces[0].Classes[0].ProtocolName.Value); }
public Fragment(string fragmentString) { Parser.Parser p = new Parser.Parser(); if (!p.IsAtomic(fragmentString)) { fragmentString = "item={" + fragmentString + "}"; } if (!p.Validate(fragmentString)) { //if it's not valid, it's broken throw new ArgumentException("invalid fragment: " + fragmentString); } ParseIntoDictionary(keyVals, fragmentString); }
public void TestExpressionFunction() { const string functionSource = "func FunctionSample(int a, int b, int c) -> return a * b * c"; var lexer = new Lexer.Lexer(); var tokens = lexer.Lex( string.Format( NamespaceSource, string.Format(ClassSource, functionSource, string.Empty, string.Empty))); var parser = new Parser.Parser(tokens); var ast = parser.Parse(); Assert.IsNotEmpty(ast.Namespaces[0].Classes[0].Functions); Assert.IsInstanceOf<FunctionSyntax>(ast.Namespaces[0].Classes[0].Functions[0]); Assert.AreEqual("FunctionSample", ast.Namespaces[0].Classes[0].Functions[0].Name.Value); }
private void goButton_Click(object sender, RoutedEventArgs e) { Parser.Parser.Variable VariableI; Parser.Parser parser = new Parser.Parser(); parser.InputString = FunctionBox.Text; VariableI = parser.GetVariable("x"); PointCollection p = new PointCollection(); for (float i = 0; i < 10; i += 0.5f) { VariableI.value = i; p.Add(new Point(parser.Calculate(), i)); } chart.DataContext = p; }
public int Run() { var parser = new Parser.Parser(_parserStyle, Grouping); foreach (Argument argument in GetArguments()) parser.Arguments.Add(argument); foreach (Option option in GetOptions()) parser.Options.Add(option); foreach (Command command in GetCommands()) parser.Commands.Add(command); try { ParseResult result = parser.Parse(Environment.GetCommandLineArgs().Skip(1)); return Handle(result); } catch (Exception ex) { Console.WriteLine(ex); return -1; } }
public string Compile(string program) { Kernel = new List<uint>(KERNEL_SIZE); for (int i = 0; i < KERNEL_SIZE; i++) { Kernel.Add(0); } Parser.Parser parser = new Parser.Parser(); string output; try { var tree = parser.Parse(program); output = CompileTree(tree); } catch (Exception e) { Console.WriteLine($"Error {e.Message} on line {LineNumber}"); throw; } return output; }
public void Evaluation() { string input = "x${test - 42}.2"; string expected = "x42.2"; var context = new ExecutionContext(); context.SetVariable("test", 84); var output = new StringBuilder(); using(var reader = new LocatedTextReaderWrapper(input)) { var parser = new Parser.Parser(reader); foreach(var node in parser.ReadNodes()) { output.Append(node.Execute(context)); } Assert.AreEqual(expected, output.ToString()); } }
public void TestFieldTypeToVariableBinding() { const string src = "namespace MyNamespace" + "{" + " class MyClass" + " {" + " var field : false" + " var field2 : new MyClass2()" + " func MyFunc()" + " {" + " var i : field" + " var i2 : field2" + " }" + " }" + "" + " class MyClass2" + " {" + " " + " }" + "}"; var lexer = new Lexer.Lexer(); var tokens = lexer.Lex(src); var parser = new Parser.Parser(tokens); var ast = parser.Parse(); var binder = new Binder(); var semanticModel = binder.Bind(new List<CompilationUnitSyntax> { ast }).Single(); var boundNamespace = semanticModel.Namespaces.Single(x => x.Name == "MyNamespace"); var referencedBoundType = boundNamespace.Types.Single(x => x.Name == "MyClass2"); var boundType = boundNamespace.Types.Single(x => x.Name == "MyClass"); var boundFunction = (BoundFunction)boundType.Functions.Single(x => x.Name == "MyFunc"); Assert.IsInstanceOf<BoolCompilerGeneratedType>( ((IBoundMember)((BoundScopeStatement)boundFunction.Statements).Statements[0]).Type); Assert.AreSame( ((IBoundMember)((BoundScopeStatement)boundFunction.Statements).Statements[1]).Type, referencedBoundType); }
static void Main(string[] args) { var sourceString = GetResource("slip1"); var template = new Node.Node { ChildNodes = new List <INode> { new Node.Node { Comment = "Parse the transaction date time", Pattern = @"(?<datepart>\d\d\d\d-\d\d-\d\d) (?<timepart>\d\d:\d\d:\d\d)", ChildNodes = new List <INode> { new Node.Node { Comment = "Save the date part into a new variable called dateandtimecombimed", TargetVariable = "dateandtimecombimed", FromVariable = "datepart" }, new Node.Node() { Comment = "Take the value from timepart, and append it to dateandtimecombimed, using a setter", TargetVariable = "dateandtimecombimed", FromVariable = "timepart", Setter = "|OLD| |NEW|" }, new Node.Node() { Comment = "Write the value from dateandtimecombimed into the result model", Target = "TransactionTime", FromVariable = "dateandtimecombimed", Format = "yyyy-MM-dd HH:mm:ss" } } }, new Node.Node() { Comment = "Grab the part of the slip that contains the line items.", Pattern = "LINE ITEMS START --->(?<lineitemssection>.*?)<--- LINE ITEMS END", Target = "LineItems", ChildNodes = new List <INode> { new Node.Node { Pattern = @"(?<desc>[^\n\r]+)(?<quantity>\d+?) (?<unitprice>\d+\.\d\d?) (?<linetotal>\d+\.\d\d?)", ChildNodes = new List <INode> { new Node.Node { Target = "Description", FromVariable = "desc" }, new Node.Node { Target = "Quantity", FromVariable = "quantity" }, new Node.Node { Target = "UnitPrice", FromVariable = "unitprice" }, new Node.Node { Target = "LineTotal", FromVariable = "linetotal" } } } } } } }; var templateJson = JsonConvert.SerializeObject(template, Newtonsoft.Json.Formatting.Indented, new JsonSerializerSettings { NullValueHandling = NullValueHandling.Ignore, ContractResolver = new ShouldSerializeContractResolver() }); var parser = new Parser.Parser(); var resultSlip = parser.Text2Object <SlipModel>(template, sourceString).Result; }
private static bool ProcessFile(string script) { if (!File.Exists(script)) { Console.Error.WriteLine("Failed to read file {0}", script); return(false); } timer.Restart(); FileInfo fileInfo = new FileInfo(script); if (fileInfo.Length == 0) { Console.WriteLine("Source file empty, skipping."); return(false); } string[] scriptLines = File.ReadAllLines(script); string scriptData = scriptLines.Aggregate((a, b) => a + "\n" + b); Lexer.Lexer lexer = new Lexer.Lexer(scriptData); Console.WriteLine("Running lexical analysis. [+{0}ms]", timer.ElapsedMilliseconds); int error = lexer.Analyse(); if (error != 0) { Console.Error.WriteLine("Failed due to error {0}", error); return(false); } #if DEBUG { int preprocessors = lexer.Tokens.Count(token => token.GetType() == typeof(PreprocessorToken)); int comments = lexer.Tokens.Count(token => token.GetType() == typeof(CommentToken)); int separators = lexer.Tokens.Count(token => token.GetType() == typeof(SeparatorToken)); int operators = lexer.Tokens.Count(token => token.GetType() == typeof(OperatorToken)); int literals = lexer.Tokens.Count(token => token.GetType() == typeof(LiteralToken)); int keywords = lexer.Tokens.Count(token => token.GetType() == typeof(KeywordToken)); int identifiers = lexer.Tokens.Count(token => token.GetType() == typeof(IdentifierToken)); Console.WriteLine("DEBUG: Preprocessor: {0} Comments: {1} Separators: {2} " + "Operators: {3} Literals: {4} Keywords: {5} Identifiers: {6}", preprocessors, comments, separators, operators, literals, keywords, identifiers); } { Console.WriteLine("DEBUG: Converting tokens back to source and comparing."); Output_Nss debugOutput = new Output_Nss(); error = debugOutput.GetFromTokens(lexer.Tokens, out string data); if (error != 0) { Console.Error.WriteLine("DEBUG: Failed due to error {0}", error); return(false); } string[] reformattedData = data.Split('\n'); int sourceLines = scriptLines.Count(); int dataLines = reformattedData.Count(); if (sourceLines != dataLines) { Console.Error.WriteLine("DEBUG: Failed due to mismatch in line count. " + "Source: {0}, Data: {1}", sourceLines, dataLines); return(false); } for (int i = 0; i < scriptLines.Length; ++i) { string sourceLine = scriptLines[i]; string dataLine = reformattedData[i]; if (sourceLine != dataLine) { Console.Error.WriteLine("DEBUG: Failed due to mismatch in line contents. " + "Line {0}.\n" + "Source line len: {1}\nData line len: {2}\n" + "Source line: {3}\nData line: {4}", i, sourceLine.Length, dataLine.Length, sourceLine, dataLine); break; } } } #endif Console.WriteLine("Running parser. [+{0}ms]", timer.ElapsedMilliseconds); Parser.Parser parser = new Parser.Parser(); error = parser.Parse(Path.GetFileName(script), scriptLines, lexer.Tokens); if (error != 0) { Console.Error.WriteLine("Failed due to error {0}", error); foreach (string errStr in parser.Errors) { Console.Error.WriteLine(" {0}", errStr); } return(false); } Console.WriteLine("Running output. [+{0}ms]", timer.ElapsedMilliseconds); Output_CSharp output = new Output_CSharp(); error = output.GetFromCU(parser.CompilationUnit, out string outputStr, out string className); if (error != 0) { Console.Error.WriteLine("Failed due to error {0}", error); return(false); } string outputPath = Path.Combine(destDir, Path.ChangeExtension(className, ".cs")); File.WriteAllText(outputPath, outputStr); return(true); }
/// <summary> /// initializes this phase. /// </summary> public ParsePhase(Parser.Parser parser) { _parser = parser; this.Name = "ast-parsing"; }
private Completion CreateDynamicFunction(Object?newTarget, FunctionKind kind, IReadOnlyList <IValue> arguments) { if (Interpreter.Instance().ExecutionContextStackSize() < 2) { throw new InvalidOperationException("Spec 19.2.1.1.1 step 1"); } var callerContext = Interpreter.Instance().SecondExecutionContext(); var callerRealm = callerContext.Realm; var calleeRealm = Interpreter.Instance().CurrentRealm(); //TODO HostEnsureCanCompileStrings if (newTarget == null) { newTarget = this; } Func <Parser.Parser, AST.FunctionStatementList> goal; Func <Intrinsics, Object> fallbackProto; switch (kind) { case FunctionKind.Normal: goal = (p) => p.ParseFunctionBody(); fallbackProto = i => i.FunctionPrototype; break; default: throw new NotImplementedException("Async and/or generators"); } var argCount = arguments.Count; var P = ""; string bodyText; if (argCount == 0) { bodyText = ""; } else if (argCount == 1) { var bodyComp = arguments[0].ToJsString(); if (bodyComp.IsAbrupt()) { return(bodyComp); } bodyText = (bodyComp.value as StringValue) !.@string; } else { var firstArg = arguments[0]; var pComp = firstArg.ToJsString(); if (pComp.IsAbrupt()) { return(pComp); } P = (pComp.value as StringValue) !.@string; int k = 1; for (; k < argCount - 1; k++) { var nextArg = arguments[k]; var nextArgStringComp = nextArg.ToJsString(); if (nextArgStringComp.IsAbrupt()) { return(nextArgStringComp); } var nextArgString = (nextArgStringComp.value as StringValue) !.@string; P += "," + nextArgString; } var bodyComp = arguments[k].ToJsString(); if (bodyComp.IsAbrupt()) { return(bodyComp); } bodyText = (bodyComp.value as StringValue) !.@string; } AST.FormalParameters?parameters = new AST.FormalParameters(); try { if (!string.IsNullOrEmpty(P)) { parameters = new Parser.Parser(P).ParseFormalParameters(); } if (parameters == null) { throw new Parser.ParseFailureException($"parameters {P} could not be parsed."); } } catch (Parser.ParseFailureException e) { return(Completion.ThrowSyntaxError($"Failed to parse parameters \"{P}\".\n{e.Message}")); } AST.FunctionStatementList body; try { body = goal(new Parser.Parser(bodyText)); } catch (Parser.ParseFailureException e) { return(Completion.ThrowSyntaxError($"Failed to parse body \"{bodyText}\".\n{e.Message}")); } //TODO detect strict mode: ContainsUseStrict bool strict = body.IsStrictMode; if (!parameters.IsSimpleParameterList() && strict) { return(Completion.ThrowSyntaxError($"parameters must be simple in strict mode. \"{P}\"")); } //TODO implement tree walking for checking if parameters or body contains a SuperCall or SuperProperty //TODO generator yield, async await errors var protoComp = Utils.GetPrototypeFromConstructor(newTarget, fallbackProto); if (protoComp.IsAbrupt()) { return(protoComp); } var proto = protoComp.value; var F = FunctionObject.FunctionAllocate(proto !, strict, kind); var realmF = F.Realm; var scope = realmF.GlobalEnv; FunctionObject.FunctionInitialize(F, FunctionCreateKind.Normal, parameters, body, scope); //TODO generator, async generator if (kind == FunctionKind.Normal) { F.MakeConstructor(); } F.SetFunctionName("anonymous"); return(Completion.NormalCompletion(F)); }
/// <summary> /// Parses parameters. /// </summary> /// <param name="args">The list of arguments to store.</param> /// <param name="tokenIt">The token iterator</param> /// <param name="parser">The parser</param> /// <param name="meta">The function meta for checking parameters</param> /// <param name="expectParenthesis">Whether or not to expect parentheses to designate the start of the parameters.</param> /// <param name="enableNewLineAsEnd">Whether or not to treat a newline as end</param> public static void ParseFuncParameters(List <Expr> args, TokenIterator tokenIt, Parser.Parser parser, bool expectParenthesis, bool enableNewLineAsEnd, FunctionMetaData meta) { int totalParameters = 0; if (tokenIt.NextToken.Token == Tokens.LeftParenthesis) { expectParenthesis = true; } // START with check for "(" if (expectParenthesis) { tokenIt.Expect(Tokens.LeftParenthesis); } bool passNewLine = !enableNewLineAsEnd; var endTokens = BuildEndTokens(enableNewLineAsEnd, meta); int totalNamedParams = 0; var hasMetaArguments = meta != null && meta.ArgumentNames != null && meta.ArgumentNames.Count > 0; while (true) { Expr exp = null; // Check for end of statement or invalid end of script. if (parser.IsEndOfParameterList(Tokens.RightParenthesis, enableNewLineAsEnd)) { break; } if (tokenIt.NextToken.Token == Tokens.Comma) { tokenIt.Advance(); } var token = tokenIt.NextToken.Token; var peek = tokenIt.Peek().Token; var isVar = parser.Context.Symbols.Contains(token.Text); var isParamNameMatch = hasMetaArguments && meta.ArgumentsLookup.ContainsKey(token.Text); var isKeywordParamName = token.Kind == TokenKind.Keyword && isParamNameMatch; // CASE 1: Named params for external c# object method calls // CASE 2: Named params for internal script functions ( where we have access to its param metadata ) if ((meta == null && token.Kind == TokenKind.Ident && peek == Tokens.Colon) || (token.Kind == TokenKind.Ident && isParamNameMatch && !isVar) || (token.Kind == TokenKind.Ident && !isParamNameMatch && !isVar && peek == Tokens.Colon) || (isKeywordParamName && !isVar)) { var paramName = token.Text; var namedParamToken = tokenIt.NextToken; tokenIt.Advance(); // Advance and check if ":" if (tokenIt.NextToken.Token == Tokens.Colon) { tokenIt.Advance(); } exp = parser.ParseExpression(endTokens, true, false, true, passNewLine, true); exp = Exprs.NamedParam(paramName, exp, namedParamToken); args.Add(exp); totalNamedParams++; } // CASE 2: Name of variable being passed to function is same as one of the parameter names. else if (isVar && hasMetaArguments && meta.ArgumentsLookup.ContainsKey(token.Text)) { // Can not have normal parameters after named parameters. if (totalNamedParams > 0) { throw tokenIt.BuildSyntaxException("Un-named parameters must come before named parameters"); } var next = tokenIt.Peek(); if (next.Token.Kind == TokenKind.Symbol) { exp = parser.ParseExpression(endTokens, true, false, true, passNewLine, false); } else { exp = parser.ParseIdExpression(null, null, false); } args.Add(exp); } // CASE 3: Normal param else { // Can not have normal parameters after named parameters. if (totalNamedParams > 0) { throw tokenIt.BuildSyntaxException("Un-named parameters must come before named parameters"); } exp = parser.ParseExpression(endTokens, true, false, true, passNewLine, true); args.Add(exp); } totalParameters++; parser.Context.Limits.CheckParserFunctionParams(exp, totalParameters); // Check for end of statement or invalid end of script. 
if (parser.IsEndOfParameterList(Tokens.RightParenthesis, enableNewLineAsEnd)) { break; } // Advance if not using fluent-parameters if (meta == null) { tokenIt.Expect(Tokens.Comma); } } // END with check for ")" if (expectParenthesis) { tokenIt.Expect(Tokens.RightParenthesis); } }
/// <summary> /// Parses and converts input from argument to formula /// </summary> /// <exception cref="Parser.ParseException">Thrown if input can't be represented as formula</exception> /// <param name="unparsedFormula">String representing formula</param> /// <returns>Formula representing the input</returns> public Formula Parse(string unparsedFormula) { var parser = new Parser.Parser(unparsedFormula); return(parser.ParseFormula()); }
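// Hedged usage sketch for the Parse wrapper above (illustrative only): the exact formula
// syntax accepted by Parser.ParseFormula is an assumption, and Parser.ParseException is
// thrown for input that cannot be represented as a formula, as documented above.
Formula formula = Parse("(p & q) -> r");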
public void TestResNet50DeploySuccessParser() { var parser = new Parser.Parser(); parser.Parse(@"CaffeTests\ResNet-50-deploy.prototxt"); }
private async void Button_Click(object sender, RoutedEventArgs e) { double tr = 0; if (!double.TryParse(Left.Text, out tr) || !double.TryParse(Eps.Text, out tr) || !double.TryParse(Right.Text, out tr)) { /* invalid numeric input in Left/Eps/Right, nothing to calculate */ } else { string expression = Expression.Text; Chart chart = this.FindName("MyWinformChart") as Chart; chart.Series.Clear(); chart.Series.Add(new Series("Series1")); chart.Series[0].BorderWidth = 5; chart.Series["Series1"].ChartArea = "Default"; chart.Series["Series1"].ChartType = SeriesChartType.Spline; double left = Convert.ToDouble(Left.Text); double right = Convert.ToDouble(Right.Text); double step = (right - left) / 20; Parser.Parser parser = new Parser.Parser(expression); if (MainWindow.isConnected) { var values = new Dictionary <string, string> { { "Action", "CalculateSecond" }, { "Func", expression }, { "Left", Left.Text }, { "Right", Right.Text }, { "Eps", Eps.Text } }; var response = await Sender.Sender.Send(values); var responseString = await response.Content.ReadAsStringAsync(); Regex argument = new Regex(@"([^\&]*)=([^\&]*)&", RegexOptions.Compiled); MatchCollection matches = argument.Matches(responseString); Dictionary <string, double> arguments = new Dictionary <string, double>(); foreach (Match item in matches) { arguments.Add(item.Groups[1].Value, double.Parse(item.Groups[2].Value)); } for (double i = 0; i < arguments.Count / 2 - 1; i += 2) { chart.Series.Add(new Series($"Series{2 + i}")); chart.Series[$"Series{2 + i}"].ChartArea = "Default"; chart.Series[$"Series{2 + i}"].ChartType = SeriesChartType.Spline; chart.Series[$"Series{2 + i}"].BorderWidth = 3; chart.Series[$"Series{2 + i}"].Points.AddXY(arguments[$"X{i}"], arguments[$"Y{i}"]); chart.Series[$"Series{2 + i}"].Points.AddXY(arguments[$"X{i + 1}"], arguments[$"Y{i + 1}"]); Result.Text = arguments["Value"].ToString(); } } else { FPtr f = new FPtr(parser.Calculate); int n = 0; double[] res = new double[1000]; double integralValue = 0; integralValue = MidFind(res, ref n, f, left, right, Convert.ToDouble(Eps.Text)); Result.Text = integralValue.ToString(); } for (double x = left; x < right + step / 2; x += step) { chart.Series["Series1"].Points.AddXY(x, parser.Calculate(x)); } } }
private async void Button_Click(object sender, RoutedEventArgs e) { double res = 0; if (!double.TryParse(Left.Text, out res) || !double.TryParse(Step.Text, out res) || !double.TryParse(Right.Text, out res)) { /* invalid numeric input in Left/Step/Right, nothing to calculate */ } else { string expression = Expression.Text; string[] methods = { "L", "T", "M" }; bool?[] method = { LeftIntegral.IsChecked, TrapIntegral.IsChecked, MidIntegral.IsChecked }; int nomer = 0; for (int i = 0; i < 3; i++) { if (method[i] == true) { nomer = i; break; } } var values = new Dictionary <string, string> { { "Action", "CalculateIntegral" }, { "Func", expression }, { "Left", Left.Text }, { "Right", Right.Text }, { "Step", Step.Text }, { "Method", methods[nomer] } }; var response = await Sender.Sender.Send(values); var responseString = await response.Content.ReadAsStringAsync(); Regex argument = new Regex(@"([^\&]*)=([^\&]*)&", RegexOptions.Compiled); MatchCollection matches = argument.Matches(responseString); Dictionary <string, double> arguments = new Dictionary <string, double>(); foreach (Match item in matches) { arguments.Add(item.Groups[1].Value, double.Parse(item.Groups[2].Value)); } Parser.Parser parser = new Parser.Parser(expression); Chart chart = this.FindName("MyWinformChart") as Chart; chart.Series["Series1"].Points.Clear(); chart.Series["Series2"].Points.Clear(); double left = Convert.ToDouble(Left.Text); double right = Convert.ToDouble(Right.Text); double step = Convert.ToDouble(Step.Text); for (double i = 0; i < arguments.Count / 2 - 1; i++) { chart.Series["Series1"].Points.AddXY(arguments[$"X{i}"], arguments[$"Y{i}"]); } for (double x = left; x < right + step / 2; x += step) { chart.Series["Series2"].Points.AddXY(x, parser.Calculate(x)); } Result.Text = arguments["Value"].ToString(); } }
public static Dictionary<string, string> LoadFromHashToTagsIndex(SyncProfile sp) { Dictionary<string, string> hashToTagString = new Dictionary<string, string>(); Parser.Parser p = new Parser.Parser(); //use syncprofile to load an existing one if it is there string exportedTagsIndexPath = HashToTagsIndexPath(sp, false); if (File.Exists(exportedTagsIndexPath)) { foreach (string lineItem in File.ReadLines(exportedTagsIndexPath)) { string hash = p.Extract("sha1Hash", lineItem); string tagString = p.Extract("tags", lineItem); hashToTagString[hash] = tagString; } } return hashToTagString; }
public void UpdateStatus(string item, bool completed, bool archived) { Parser.Parser p = new Parser.Parser(); string itemValue = p.Extract("item", item); if (string.IsNullOrWhiteSpace(itemValue)) { itemValue = item; } ToDoItem tdi = new ToDoItem() { Description = itemValue, Completed = completed, Archived = archived }; //tdi.Add(new Fragment(item)); AddWithMerge(tdi); }
protected override SimpleResponse Respond(AliceRequest request, State state) { using var db = new DatabaseContext(); Parser.Parser s = new Parser.Parser(); var raspisanie = s.getRasp(db.Users.Find(request.Session.UserId).group); int requestDaysId = 0; string str = ""; var day = request.Request.Nlu.Tokens[request.Request.Nlu.Tokens.Count - 1]; if (request.Request.Command.Contains("следующ")) { switch (day) { case "понедельник": requestDaysId = 7; foreach (var lesson in raspisanie[7].lessons) { if (lesson.Value != "" && lesson.Value != "-") { str = str + "В " + lesson.Key + " " + lesson.Value + " \n"; } } break; case "вторник": requestDaysId = 8; foreach (var lesson in raspisanie[8].lessons) { if (lesson.Value != "" && lesson.Value != "-") { str = str + "В " + lesson.Key + " " + lesson.Value + " \n"; } } break; case "среду": requestDaysId = 9; foreach (var lesson in raspisanie[9].lessons) { if (lesson.Value != "" && lesson.Value != "-") { str = str + "В " + lesson.Key + " " + lesson.Value + " \n"; } } break; case "четверг": requestDaysId = 10; foreach (var lesson in raspisanie[10].lessons) { if (lesson.Value != "" && lesson.Value != "-") { str = str + "В " + lesson.Key + " " + lesson.Value + " \n"; } } break; case "пятницу": requestDaysId = 11; foreach (var lesson in raspisanie[11].lessons) { if (lesson.Value != "" && lesson.Value != "-") { str = str + "В " + lesson.Key + " " + lesson.Value + " \n"; } } break; case "субботу": requestDaysId = 12; foreach (var lesson in raspisanie[12].lessons) { if (lesson.Value != "" && lesson.Value != "-") { str = str + "В " + lesson.Key + " " + lesson.Value + " \n"; } } break; case "воскресенье": requestDaysId = 13; str = " в этот день у вас нет пар"; break; case "воскресение": requestDaysId = 13; str = " в этот день у вас нет пар"; break; } } else { switch (day) { case "понедельник": requestDaysId = 0; foreach (var lesson in raspisanie[0].lessons) { if (lesson.Value != "" && lesson.Value != "-") { str = str + "В " + lesson.Key + " " + lesson.Value + " \n"; } } break; case "вторник": requestDaysId = 1; foreach (var lesson in raspisanie[1].lessons) { if (lesson.Value != "" && lesson.Value != "-") { str = str + "В " + lesson.Key + " " + lesson.Value + " \n"; } } break; case "среду": requestDaysId = 2; foreach (var lesson in raspisanie[2].lessons) { if (lesson.Value != "" && lesson.Value != "-") { str = str + "В " + lesson.Key + " " + lesson.Value + " \n"; } } break; case "четверг": requestDaysId = 3; foreach (var lesson in raspisanie[3].lessons) { if (lesson.Value != "" && lesson.Value != "-") { str = str + "В " + lesson.Key + " " + lesson.Value + " \n"; } } break; case "пятницу": requestDaysId = 4; foreach (var lesson in raspisanie[4].lessons) { if (lesson.Value != "" && lesson.Value != "-") { str = str + "В " + lesson.Key + " " + lesson.Value + " \n"; } } break; case "субботу": requestDaysId = 5; foreach (var lesson in raspisanie[5].lessons) { if (lesson.Value != "" && lesson.Value != "-") { str = str + "В " + lesson.Key + " " + lesson.Value + " \n"; } } break; case "воскресенье": requestDaysId = 6; foreach (var lesson in raspisanie[6].lessons) { if (lesson.Value != "" && lesson.Value != "-") { str = str + "В " + lesson.Key + " " + lesson.Value + " \n"; } } break; case "воскресение": requestDaysId = 6; foreach (var lesson in raspisanie[6].lessons) { if (lesson.Value != "" && lesson.Value != "-") { str = str + "В " + lesson.Key + " " + lesson.Value + " \n"; } } break; } } var today = DateTime.Today.DayOfWeek.ToString(); int todaysId = 0; switch 
(today.ToLower()) { case "monday": todaysId = 0; break; case "tuesday": todaysId = 1; break; case "wednesday": todaysId = 2; break; case "thursday": todaysId = 3; break; case "friday": todaysId = 4; break; case "saturday": todaysId = 5; break; case "sunday": todaysId = 6; break; } if (requestDaysId == todaysId) { if (str == "") { return new SimpleResponse { Text = "В " + day + " у вас нет пар" }; } else { return new SimpleResponse { Text = "Если вы про сегодня, \n" + str }; } } else { if (str == "") { return new SimpleResponse { Text = "В " + day + " у вас нет пар" }; } else { return new SimpleResponse { Text = "В " + day + " у вас \n" + str }; } } }
/// <summary> /// Check the validity of a progress code in the point of view of the appbuilder (make sure it can be opened within the appbuilder) /// </summary> public static void DisplayParserErrors(bool silent = false) { if (Npp.CurrentFile.IsProgress && !_displayParserErrorsIgnoredFiles.Contains(Npp.CurrentFile.Path)) { Task.Factory.StartNew(() => { var currentFilePath = Npp.CurrentFile.Path; var message = new StringBuilder(); message.Append("The analyzed file was :<br>" + currentFilePath.ToHtmlLink() + "<br>"); var parser = new Parser.Parser(Sci.Text, currentFilePath, null, false); var parserErrors = parser.ParseErrorsInHtml; if (!string.IsNullOrEmpty(parserErrors)) { message.Append("<br>The parser found the following syntax errors :<br>"); message.Append(parserErrors); } var blockTooLong = new StringBuilder(); foreach (var scope in parser.ParsedItemsList.Where(item => item is ParsedImplementation || item is ParsedProcedure || item is ParsedOnStatement).Cast <ParsedScopeItem>()) { if (CheckForTooMuchChar(scope)) { blockTooLong.AppendLine("<div>"); blockTooLong.AppendLine(" - " + (scope.FilePath + "|" + scope.Line).ToHtmlLink("Line " + (scope.Line + 1) + " : <b>" + scope.Name + "</b>") + " (" + NbExtraCharBetweenLines(scope.Line, scope.EndBlockLine) + " extra chars)"); blockTooLong.AppendLine("</div>"); } } if (blockTooLong.Length > 0) { message.Append("<br>This file is currently unreadable in the AppBuilder.<br>The following blocks contain more characters than the max limit (" + Config.Instance.GlobalMaxNbCharInBlock + " characters) :<br>"); message.Append(blockTooLong); message.Append("<br><i>To prevent this, reduce the number of characters in the above blocks.<br>Deleting dead code and trimming spaces is a good place to start!</i>"); } // no errors var noProb = blockTooLong.Length == 0 && string.IsNullOrEmpty(parserErrors); if (noProb) { if (silent) { return; } message.Append("No problems found!"); } else { if (silent) { message.Append("<br><br>" + "disable".ToHtmlLink("Click here to disable the automatic check for this file")); } } UserCommunication.NotifyUnique("DisplayParserErrors", message.ToString(), noProb ? MessageImg.MsgOk : MessageImg.MsgWarning, "Check code validity", "Analysis results", args => { if (args.Link.Equals("disable")) { args.Handled = true; UserCommunication.CloseUniqueNotif("DisplayParserErrors"); if (!_displayParserErrorsIgnoredFiles.Contains(currentFilePath)) { _displayParserErrorsIgnoredFiles.Add(currentFilePath); } } }, noProb ? 5 : 0); }); } }
public Templater(TargetLanguage target, Parser.Parser parser) { Target = target; lines = parser.GenerateAbstractSyntaxTree(); }
public void TestGoogLeNetDeploySuccessParser() { var parser = new Parser.Parser(); parser.Parse(@"CaffeTests\GoogLeNet-deploy.prototxt"); }
public void Compile() { string filename = _filename; string suffix = ""; int dot = filename.IndexOf("."); try{ if (dot > -1) { suffix = filename.Substring(dot + 1); } if (dot == -1 || suffix.ToLower() != "cbl") { Console.WriteLine("Program filename must end in .cbl"); return; } Console.WriteLine("Compiling " + filename); string assemblyName = filename.Substring(0, dot); //Reading... StreamReader sr = new StreamReader(filename); string program = ""; while (sr.Peek() != -1) { program += sr.ReadLine() + "\n"; } sr.Close(); ReferenceManager referenceManager = new ReferenceManager(_references); //Parsing... Parser.Parser parser = new Parser.Parser(); Program ast = parser.Parse(program, _verbose); ast.References = _references; //Contextual analysis... ContextualAnalyzer analyzer = new ContextualAnalyzer(); analyzer.Analyze(ast, referenceManager); //Code generation... Generator generator = new Generator(ast, referenceManager); generator.GenerateIL(assemblyName); Assemble(assemblyName); } catch (CompilerException e) { Console.WriteLine("ERROR:"); Console.WriteLine(e.ToString()); if (_verbose) { Console.WriteLine("Stack trace:\n" + e.StackTrace); } } catch (System.IO.FileNotFoundException e) { Console.WriteLine("ERROR:"); Console.WriteLine("File not found: " + e.Message); Console.WriteLine(e.ToString()); } }
/// <summary> Returns a JavaCC generated Parser. /// </summary> /// <returns>Parser javacc generated parser /// </returns> public Parser.Parser CreateNewParser() { Parser.Parser parser = new Parser.Parser(this); parser.Directives = directiveManager; return(parser); }
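// Hedged usage sketch for CreateNewParser above (not from the original source): it mirrors
// how the pooled parse method in this runtime calls parser.parse(reader, templateName);
// the template text and template name here are illustrative assumptions.
Parser.Parser templateParser = CreateNewParser();
SimpleNode ast = templateParser.parse(new StringReader("Hello $name"), "inline.vm");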
public override string[] Build() { var options = BuildOption(); options.SourceCode = Parameters.Template; var tmp = new List<string>(); var parser = new Parser.Parser(options); string fileName; string temp; string fName; DirectoryInfo dir; switch (Parameters.FileDependency) { case FileDepend.DATABASE: parser.Options.VariableParameter = Parameters.DataBase; parser.Options.StatementParameters = new object[] { Parameters.DataBase }; temp = parser.Parse(ParserType.XCODER); fName = string.IsNullOrEmpty(Parameters.FileNameFormat) ? Parameters.DataBase.Connection.Name : string.Format(Parameters.FileNameFormat, Parameters.DataBase.Connection.Name); fileName = Path.Combine(Parameters.OutputDirectory.FullName, fName + Extension); dir = new FileInfo(fileName).Directory; if (dir != null && !dir.Exists) { dir.Create(); } if (Parameters.Override) { File.WriteAllText(fileName, temp); tmp.Add(fileName); } else { if (!File.Exists(fileName)) { File.WriteAllText(fileName, temp); } tmp.Add(fileName); } break; case FileDepend.TABLES: foreach (Table table in Parameters.DataBase.Tables) { parser.Options.VariableParameter = table; parser.Options.StatementParameters = new object[] { Parameters.DataBase, table }; temp = parser.Parse(ParserType.XCODER); fName = string.IsNullOrEmpty(Parameters.FileNameFormat) ? table.Name : string.Format(Parameters.FileNameFormat, table.Name); fileName = Path.Combine(Parameters.OutputDirectory.FullName, fName + Extension); dir = new FileInfo(fileName).Directory; if (dir != null && !dir.Exists) { dir.Create(); } if (Parameters.Override) { File.WriteAllText(fileName, temp); tmp.Add(fileName); } else { if (!File.Exists(fileName)) { File.WriteAllText(fileName, temp); } tmp.Add(fileName); } } break; default: throw new ArgumentOutOfRangeException(); } parser.Release(); return tmp.ToArray(); }
public static Completion PerformEval(IValue xValue, Realm evalRealm, bool strictCaller, bool direct) { if (!direct && strictCaller) { throw new InvalidOperationException("Indirect evals cannot have strictCaller. Spec 18.2.1.1 step 1"); } if (!(xValue is StringValue xString)) { return(Completion.NormalCompletion(xValue)); } var x = xString.@string; var thisEnvRec = Interpreter.Instance().GetThisEnvironment(); bool inFunction, inMethod, inDerivedConstructor; if (thisEnvRec is FunctionEnvironmentRecord functionEnvironmentRecord) { var F = functionEnvironmentRecord.FunctionObject; inFunction = true; inMethod = functionEnvironmentRecord.HasSuperBinding(); inDerivedConstructor = F.ConstructorKind == ConstructorKind.Derived; } else { inFunction = inMethod = inDerivedConstructor = false; } Script script; try { //TODO use the in variables to apply additional early errors // Spec 18.2.1.1 step 6 script = new Parser.Parser(x) { Strict = strictCaller }.ParseScript(); } catch (Exception e) { return(Completion.ThrowSyntaxError(e.Message)); } if (!script.scriptBody.Any()) { return(Completion.NormalCompletion(UndefinedValue.Instance)); } var strictEval = strictCaller || script.IsStrictMode; var ctx = Interpreter.Instance().RunningExecutionContext(); LexicalEnvironment lexEnv; LexicalEnvironment varEnv; if (direct) { lexEnv = ctx.LexicalEnvironment.NewDeclarativeEnvironment(); varEnv = ctx.VariableEnvironment; } else { lexEnv = evalRealm.GlobalEnv.NewDeclarativeEnvironment(); varEnv = evalRealm.GlobalEnv; } if (strictEval) { varEnv = lexEnv; } var evalCtx = new ExecutionContext(evalRealm) { VariableEnvironment = varEnv, LexicalEnvironment = lexEnv }; Interpreter.Instance().PushExecutionStack(evalCtx); var result = EvalDeclarationInstantiation(script.scriptBody, varEnv, lexEnv, strictEval); if (result.completionType == CompletionType.Normal) { result = script.scriptBody.Evaluate(Interpreter.Instance()); } if (result.completionType == CompletionType.Normal && result.value == null) { result = Completion.NormalCompletion(UndefinedValue.Instance); } Interpreter.Instance().PopExecutionStack(evalCtx); return(result); }
public void TestAlexNetSuccessParser() { var parser = new Parser.Parser(); parser.Parse(@"CaffeTests\AlexNet.prototxt"); }
private static void TestParsing() { parse.ParseTable(); parse = new SDTranslator(); parse.ParseTable(); }
public void TestErrorParser() { var parser = new Parser.Parser(); parser.Parse(@"CaffeTests\Error.prototxt"); }
public Client(HttpListenerContext Client, ServerDbContext db) { HttpListenerRequest request = Client.Request; // Declare the string that will hold the client's request string Request = ""; // Buffer for the data received from the client List <byte> Buffer = new List <byte>(); // Holds the most recent byte read from the client int nextbyte = 0; // Read from the client's stream for as long as data keeps arriving while ((nextbyte = request.InputStream.ReadByte()) != -1) { Buffer.Add((byte)nextbyte); Request += Encoding.ASCII.GetString(new byte[] { (byte)nextbyte }, 0, 1); if (Request.IndexOf("\r\n\r\n") >= 0 || Request.Length > 4096) { break; } } Request += '&'; Console.WriteLine(Request); #region Get // Parse the request string using regular expressions if (request.HttpMethod == "GET") { string Response = ""; if (request.Url.AbsolutePath.EndsWith("/")) { Response += "index.html"; } string FilePath = "www/" + Response; // If the file does not exist in the www folder, send a 404 error if (!File.Exists(FilePath)) { SendResult(Client, 404); return; } // Open the file, guarding against errors FileStream FS; try { FS = new FileStream(FilePath, FileMode.Open, FileAccess.Read, FileShare.Read); } catch (Exception) { // If an error occurred, send the client a 500 error SendResult(Client, 500); return; } HttpListenerResponse response = Client.Response; byte[] buffer = new byte[(int)FS.Length]; // Read the data from the file FS.Read(buffer, 0, (int)FS.Length); response.ContentLength64 = buffer.Length; response.ContentType = "text/html"; Stream output = response.OutputStream; output.Write(buffer, 0, buffer.Length); // Close the stream output.Close(); // Close the file and the connection FS.Close(); Console.WriteLine("Обработка подключений завершена"); } #endregion Get #region Post if (request.HttpMethod == "POST") { Regex argument = new Regex(@"(\w*)=([^\&]*)&", RegexOptions.Compiled); MatchCollection matches = argument.Matches(Request); Dictionary <string, string> arguments = new Dictionary <string, string>(); foreach (Match item in matches) { arguments.Add(item.Groups[1].Value, item.Groups[2].Value); } switch (arguments.GetValueOrDefault("Action")) { case "Authentication": { var user = db.Users.Select(a => a).Where(a => a.Name == arguments.GetValueOrDefault("Login")).ToList <User>(); if (user.Count != 0 && user[0].Authorization(arguments.GetValueOrDefault("Password"))) { HttpServer.Server.activeUsers.Add(Client.Request.RemoteEndPoint.ToString(), user[0]); SendResult(Client, 200); } else { SendResult(Client, 400); } break; } case "Closing": { HttpServer.Server.activeUsers.Remove(Client.Request.RemoteEndPoint.ToString()); break; } case "Registration": { var user = db.Users.Select(a => a).Where(a => a.Name == arguments.GetValueOrDefault("Login")).ToList <User>(); if (user.Count != 0) { SendResult(Client, 401); } else if (Verefications.isCorrectLogin(arguments.GetValueOrDefault("Login"))) { User u = new User(arguments.GetValueOrDefault("Login"), arguments.GetValueOrDefault("Password"), arguments.GetValueOrDefault("Email")); db.SaveUser(u); SendResult(Client, 200); } else { SendResult(Client, 402); } break; } case "CalculateIntegral": { string op = "()+-*/^,"; string function = arguments.GetValueOrDefault("Func"); double left = Convert.ToDouble(arguments.GetValueOrDefault("Left").Replace($"%{Convert.ToString((int)',', 16)}", ",", ignoreCase: true, null)); double right = Convert.ToDouble(arguments.GetValueOrDefault("Right").Replace($"%{Convert.ToString((int)',', 16)}", ",", ignoreCase: 
true, null)); double step = Convert.ToDouble(arguments.GetValueOrDefault("Step").Replace($"%{Convert.ToString((int)',', 16)}", ",", ignoreCase: true, null)); string method = arguments.GetValueOrDefault("Method"); for (int i = 0; i < op.Length; i++) { function = function.Replace($"%{Convert.ToString((int)op[i], 16)}", op[i].ToString(), ignoreCase: true, null); } function = function.Replace(" ", ""); Parser.Parser parser = new Parser.Parser(function); FPtr f = new FPtr(parser.Calculate); int n = (int)((right - left) / step); double[] res = new double[1000]; double integralValue = 0; switch (method) { case "L": { integralValue = Left(res, n, f, left, right, step); break; } case "T": { integralValue = Trap(res, n, f, left, right, step); break; } case "M": { integralValue = Mid(res, n, f, left, right, step); break; } default: break; } string result = ""; int nom = 0; for (int i = 0; i < n * 4; i += 2, nom++) { result += $"X{nom}={String.Format("{0:f3}", res[i])}&Y{nom}={String.Format("{0:f3}", res[i + 1])}&"; } result += $"Value={String.Format("{0:f4}", integralValue)}&"; SendResult(Client, 200, result); break; } case "CalculateSecond": { string op = "()+-*/^,"; string function = arguments.GetValueOrDefault("Func"); double left = Convert.ToDouble(arguments.GetValueOrDefault("Left").Replace($"%{Convert.ToString((int)',', 16)}", ",", ignoreCase: true, null)); double right = Convert.ToDouble(arguments.GetValueOrDefault("Right").Replace($"%{Convert.ToString((int)',', 16)}", ",", ignoreCase: true, null)); double eps = Convert.ToDouble(arguments.GetValueOrDefault("Eps").Replace($"%{Convert.ToString((int)',', 16)}", ",", ignoreCase: true, null)); for (int i = 0; i < op.Length; i++) { function = function.Replace($"%{Convert.ToString((int)op[i], 16)}", op[i].ToString(), ignoreCase: true, null); } function = function.Replace(" ", ""); Parser.Parser parser = new Parser.Parser(function); FPtr f = new FPtr(parser.Calculate); int n = 0; double[] res = new double[1000]; double integralValue = 0; integralValue = MidFind(res, ref n, f, left, right, eps); string result = ""; int nom = 0; for (int i = 0; i < n; i += 2, nom++) { result += $"X{nom}={String.Format("{0:f3}", res[i])}&Y{nom}={String.Format("{0:f3}", res[i + 1])}&"; } result += $"Value={String.Format("{0:f4}", integralValue)}&"; SendResult(Client, 200, result); break; } } } #endregion Post Client.Response.Close(); }
internal void Process(string fileArg) { GetNames(fileArg); // check for file exists OpenSource(); // parse source file if (inputFile != null) { DateTime start = DateTime.Now; try { handler = new ErrorHandler(); scanner = new Scanner(inputFile); parser = new Parser.Parser(scanner); scanner.yyhdlr = handler; parser.Initialize(this, scanner, handler, new OptionParser2(ParseOption)); aast = parser.Aast; parser.Parse(); // aast.DiagnosticDump(); if (verbose) { Status(start); } CheckOptions(); if (!Errors && !ParseOnly) { // build NFSA if (ChrClasses) { DateTime t0 = DateTime.Now; partition = new Partition(TargetSymCardinality, this); partition.FindClasses(aast); partition.FixMap(); if (verbose) { ClassStatus(t0, partition.Length); } } else { CharRange.Init(TargetSymCardinality); } nfsa = new NFSA(this); nfsa.Build(aast); if (!Errors) { // convert to DFSA dfsa = new DFSA(this); dfsa.Convert(nfsa); if (!Errors) { // minimize automaton if (minimize) { dfsa.Minimize(); } if (!Errors && !checkOnly) { // emit the scanner to output file TextReader frameRdr = FrameReader(); TextWriter outputWrtr = OutputWriter(); dfsa.EmitScanner(frameRdr, outputWrtr); if (!embedBuffers) { CopyBufferCode(); } // Clean up! if (frameRdr != null) { frameRdr.Close(); } if (outputWrtr != null) { outputWrtr.Close(); } } } } } } catch (Exception ex) { string str = ex.Message; handler.AddError(str, aast.AtStart); throw; } } }
private void Execution() { Display display; Engine engine; Memory memory; Stack stack; bool ignoreNextDown = false; bool ignoreNextUp = false; bool mustReinitialize = false; // For some reason the first exception that is raised on this thread takes a long time // to propagate. It is better to raise it here, while the thread initializes, than // later, when it could cause a delay visible to the user. try { throw new Error(); } catch { } // Controls must be accessed from the thread that created them. For most controls, this // is the main thread. But the display is special, as it is mostly updated during // execution. So it is created by the execution thread. display = new Display(downKeystrokeWasEnqueued); display.Font = new System.Drawing.Font ("Quartz", 26.25F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((System.Byte)(0))); display.ForeColor = System.Drawing.Color.Red; display.Location = new System.Drawing.Point(8, 8); display.Name = "display"; display.Size = new System.Drawing.Size(288, 40); display.TabIndex = 0; display.AcceptKeystrokes += new Mockingbird.HP.Control_Library.Display.DisplayEvent(ExecutionAcceptKeystrokes); display.CompleteKeystrokes += new Mockingbird.HP.Control_Library.Display.DisplayEvent(ExecutionCompleteKeystrokes); main.Controls.Add(display); for (;;) { // The display is initially black, as when the calculator is powered off. display.ShowText("", 0, 0); // Create the components that depend on the display. memory = new Memory(display); program = new Program(display, reader); stack = new Class_Library.Stack(display); engine = new Engine(display, memory, program, reader, stack, downKeystrokeWasEnqueued); // We need two parsers: one that processes the MouseDown events, and one that // processes the MouseUp events, because both events have different effects for a // given key (e.g., R/S displays the next instruction when depressed, and runs the // program when released). The two parsers will go through exactly the same // productions, but they will pass a different motion indicator to the engine. downActions = new Actions(engine, KeystrokeMotion.Down); downParser = new Parser.Parser(reader, program, downActions); upActions = new Actions(engine, KeystrokeMotion.Up); upParser = new Parser.Parser(reader, program, upActions); // Notify the main thread that we are ready to process keystrokes. isInitialized.Set(); // Now wait until the main thread tells us that the calculator has been powered on. // Note that we couldn't call notifyUI here to set the display mode, because the // main window may not be built yet. The main thread will have to send a dummy // keystroke when it wants the display mode to be refreshed. PowerOn.WaitOne(); // Reinitialize the display to its power-on state. display.Digits = 2; display.Format = DisplayFormat.Fixed; display.Mode = DisplayMode.Numeric; display.Value = 0; do { ExecutionProcessKeystroke (display, engine, ref ignoreNextDown, ref ignoreNextUp, out mustReinitialize); } while (!mustReinitialize); } }
static void Main(string[] args) { if (args.Length != 1) { Console.WriteLine("Please provide input file"); Environment.Exit(-1); } string filePath = $@"{Environment.CurrentDirectory}\{args[0]}"; string fileName = Path.GetFileNameWithoutExtension(filePath); string fileDirectory = Path.GetDirectoryName(filePath); string inputText = File.ReadAllText(filePath); Lexer.Lexer lex = new Lexer.Lexer(inputText); List <Token> tokensToParse = new List <Token>(); using (StreamWriter tokenFile = new StreamWriter($@"{fileDirectory}\{fileName}.outlextokens")) using (StreamWriter tokenErrorFile = new StreamWriter($@"{fileDirectory}\{fileName}.outlexerrors")) { Token t; do { t = lex.GetNextToken(); //Console.WriteLine(t.ToString()); if (lex.IsErrorToken(t.TokenType)) { tokenErrorFile.WriteLine(t.ToString()); Console.WriteLine($"LexError: {t.ToString()}"); } else { tokenFile.WriteLine(t.ToString()); tokensToParse.Add(t); } }while (t.TokenType != TokenType.EOF); tokensToParse.RemoveAll(x => lex.IsCommentToken(x.TokenType)); Console.WriteLine("INFO: Lexing completed."); } using (StreamWriter astStream = new StreamWriter($@"{fileDirectory}\{fileName}.outast")) using (StreamWriter derivationsStream = new StreamWriter($@"{fileDirectory}\{fileName}.outderivation")) using (StreamWriter syntaxErrorStream = new StreamWriter($@"{fileDirectory}\{fileName}.outsyntaxerrors")) using (StreamWriter symbolTablesStream = new StreamWriter($@"{fileDirectory}\{fileName}.outsymboltables")) using (StreamWriter semanticErrorStream = new StreamWriter($@"{fileDirectory}\{fileName}.outsemanticerrors")) using (StreamWriter codeGenOutput = new StreamWriter($@"{fileDirectory}\{fileName}.moon")) { // Do parsing Parser.Parser parser = new Parser.Parser(tokensToParse, syntaxErrorStream, derivationsStream, astStream); Console.WriteLine(parser.Parse() ? "Parsing passed" : "Error: Parsing Failed"); var tree = parser.GetASTTree(); var printVisitor = new DOTPrinterVisitor(astStream); tree.Accept(printVisitor); astStream.Flush(); Console.WriteLine("INFO: AST Tree dumped to outast"); var symbolTableVisitor = new SymbolTableVisitor(semanticErrorStream); tree.Accept(symbolTableVisitor); Console.WriteLine("INFO: SymbolTable Generated"); var semanticCheckerVisitor = new SemanticCheckerVisitor(semanticErrorStream, symbolTableVisitor.GlobalSymbolTable); tree.Accept(semanticCheckerVisitor); Console.WriteLine("INFO: Semantic Checking Complete"); syntaxErrorStream.Flush(); semanticErrorStream.Flush(); bool hasErrors = semanticErrorStream.BaseStream.Length != 0 || syntaxErrorStream.BaseStream.Length != 0; if (hasErrors) { Console.WriteLine("Errors generated during parsing/semantic checking, terminating..."); Console.ReadKey(); Environment.Exit(-10); } // Codegen codeGenOutput.NewLine = "\n"; var codeWriter = new CodeWriter(codeGenOutput); var codeGen = new CodeGen.CodeGen(tree, symbolTableVisitor.GlobalSymbolTable, codeWriter); codeGen.GenerateCode(); symbolTablesStream.WriteLine(symbolTableVisitor.GlobalSymbolTable); Console.WriteLine("INFO: Code Generated"); Console.ReadKey(); } }
private void RunTest(ScriptTestData test, bool pack) { var result = Tokenizer.Parse(test.Script); _output.WriteLine(test.Path); _output.WriteLine(new string('-', test.Path.Length)); var tokenLength = result.Tokens.Max(t => t.TokenType.ToString().Length) + 2; var sb = new StringBuilder(); if (result.Warnings.Count > 0) { _output.WriteLine("Warnings:"); foreach (var warning in result.Warnings) { _output.WriteLine("[" + warning.Range.StartLine + ":" + warning.Range.StartColumn + "] " + warning.Message); } _output.WriteLine(""); } var tokens = result.Tokens; _output.WriteLine("Tokens:"); foreach (var token in tokens) { var type = token.TokenType.ToString(); sb.Append(type); if (token.Value != null) { sb.Append(new string(' ', tokenLength - type.Length)); sb.Append('"'); sb.Append(token.Value); sb.Append('"'); } _output.WriteLine(sb.ToString()); sb.Clear(); } if (pack) { var package = ScriptPacker.Pack(tokens); _output.WriteLine(""); _output.WriteLine("Packaged:"); _output.WriteLine(ScriptPacker.ToString(package, true)); tokens = ScriptPacker.Unpack(package); } Dictionary <string, object> context; if (test.Model == null) { context = new Dictionary <string, object>(); } else { var deserializer = new YamlDotNet.Serialization.Deserializer(); context = deserializer.Deserialize <Dictionary <string, object> >(test.Model); } var parser = new Parser.Parser(tokens); var actual = parser.Parse(context); if (test.Expected != null) { Assert.Equal(test.Expected, actual); } }
private static int Main( string[] args ) { Stream inputFile = null; Grammar grammar = null; ErrorHandler handler = new ErrorHandler(); string inputFileInfo = null; // Filename plus revision time. Lexers.Scanner scanner = null; Parser.Parser parser = null; Assembly assm = Assembly.GetExecutingAssembly(); object info = Attribute.GetCustomAttribute( assm, typeof( AssemblyFileVersionAttribute ) ); versionInfo = ((AssemblyFileVersionAttribute)info).Version; try { string filename = ProcessOptions( args ); if (filename == null) return MC_OK; try { inputFile = new FileStream( filename, FileMode.Open, FileAccess.Read, FileShare.Read ); inputFileInfo = filename + " - " + File.GetLastWriteTime( filename ).ToString(); } catch (IOException x) { string message; inputFile = null; if (x is FileNotFoundException) message = String.Format( CultureInfo.InvariantCulture, "Source file <{0}> not found{1}", filename, Environment.NewLine ); else message = String.Format( CultureInfo.InvariantCulture, "Source file <{0}> could not be opened{1}", filename, Environment.NewLine ); handler.AddError( 4, message, null ); // aast.AtStart; return MC_FILEERROR; } scanner = new Lexers.Scanner( inputFile ); scanner.SetHandler( handler ); parser = new Parser.Parser( filename, inputFileInfo, scanner, handler ); // // If the parse is successful, then process the grammar. // Otherwise just report the errors that have been listed. // if (parser.Parse()) { grammar = parser.Grammar; if (Terminal.Max > 255) handler.ListError( null, 103, CharacterUtilities.Map( Terminal.Max ), '\'' ); LALRGenerator generator = new LALRGenerator( grammar ); List<AutomatonState> states = generator.BuildStates(); generator.ComputeLookAhead(); generator.BuildParseTable(); if (!grammar.CheckGrammar( handler )) throw new ArgumentException( "Non-terminating grammar" ); // // If the grammar has non-terminating non-terms we cannot // create a diagnostic report as the grammar is incomplete. // if (!handler.Errors) { CodeGenerator code = new CodeGenerator(); code.Generate( states, grammar ); } bool DoDiagnose = Diagnose && !grammar.HasNonTerminatingNonTerms; if (Report || DoDiagnose) { string htmlName = System.IO.Path.ChangeExtension( filename, ".report.html" ); try { System.IO.FileStream htmlFile = new System.IO.FileStream( htmlName, System.IO.FileMode.Create ); System.IO.StreamWriter htmlWriter = new System.IO.StreamWriter( htmlFile ); Grammar.HtmlHeader( htmlWriter, filename ); if (Report && DoDiagnose) grammar.GenerateCompoundReport( htmlWriter, inputFileInfo, states ); else if (Report) grammar.GenerateReport( htmlWriter, inputFileInfo, states ); Grammar.HtmlTrailer( htmlWriter ); if (htmlFile != null) { htmlWriter.Flush(); htmlFile.Close(); } } catch (System.IO.IOException) { Console.Error.WriteLine( "Cannot create html output file {0}", htmlName ); } } } } catch (System.Exception e) { if (e is TooManyErrorsException) return MC_TOOMANYERRORS; Console.Error.WriteLine( "Unexpected Error {0}", e.Message ); if (NoThrowOnError) { // report the error, do not let it go into the void Console.Error.WriteLine( e ); return MC_EXCEPTION; } } finally { if (handler.Errors || handler.Warnings) handler.DumpAll( (scanner == null ? null : scanner.Buffer), Console.Error ); if ((Listing || handler.Errors || handler.Warnings) && parser != null) { string listName = parser.ListfileName; StreamWriter listStream = ListingFile( listName ); if (listStream != null) handler.MakeListing( scanner.Buffer, listStream, parser.SourceFileInfo, versionInfo ); } } return MC_OK; }
public static ProgramSyntax CreateFromText(string text) { var parser = new Parser.Parser(text); return(parser.Program()); }
private Document Parse(string text, IHost host) { Parser.Parser parser = new Parser.Parser(host); Document document; parser.Parse(text, out document); return document; }
public static SyntaxTree Parse(string text) { var parser = new Parser.Parser(text); return(parser.Parse()); }
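// Hedged usage sketch for the Parse helper above (illustrative only): the source text and
// what the resulting SyntaxTree exposes are assumptions.
SyntaxTree tree = Parse("1 + 2 * 3");
// The tree would then be handed to later phases (binding, evaluation, printing, and so on).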
private static string GetTagsFromKeyValFile(SyncProfile sp, string sha1Hash) { //load by using profile to get sync root //which gives us NWD/config string fileHashIndexPath = Configuration.SyncRootConfigFile(sp.Name, "FileHashIndex"); string tagIndexPath = Configuration.SyncRootConfigFile(sp.Name, "TagIndex"); string tags = ""; if (File.Exists(fileHashIndexPath) && File.Exists(tagIndexPath)) { Parser.Parser p = new Parser.Parser(); List<string> paths = new List<string>(); //get all paths matching hash (may be multiple files, if copied in multiple places) foreach (string lineItem in File.ReadLines(fileHashIndexPath)) { string path = p.Extract("path", lineItem); string hash = p.Extract("sha1Hash", lineItem); if (!string.IsNullOrWhiteSpace(path) && !string.IsNullOrWhiteSpace(hash)) { if (hash.Equals(sha1Hash, StringComparison.CurrentCultureIgnoreCase)) { paths.Add(path); } } } List<string> tagStrings = new List<string>(); //since we may have multiple files for a given hash, need to get all tags for those paths foreach (string lineItem in File.ReadLines(tagIndexPath)) { string path = p.Extract("path", lineItem); string tagString = p.Extract("tags", lineItem); if (paths.Contains(path)) { tagStrings.Add(tagString); } } //remove any duplicates HashSet<string> uniqueTags = new HashSet<string>(); foreach (string tagString in tagStrings) { var theseTags = StringToList(tagString); foreach (string tag in theseTags) { if (!uniqueTags.Contains(tag)) { uniqueTags.Add(tag); } } } tags = string.Join(", ", uniqueTags); } return tags; }
/// <summary> Parse the input and return the root of the AST node structure. /// </summary> /// <param name="reader">TextReader retrieved by a resource loader</param> /// <param name="templateName">name of the template being parsed</param> /// <param name="dumpNamespace">flag to dump the Velocimacro namespace for this template</param> public SimpleNode parse(TextReader reader, String templateName, bool dumpNamespace) { SimpleNode ast = null; Parser.Parser parser = (Parser.Parser)parserPool.get(); bool madeNew = false; if (parser == null) { /* * if we couldn't get a parser from the pool * make one and log it. */ error("Runtime : ran out of parsers. Creating new. " + " Please increment the parser.pool.size property." + " The current value is too small."); parser = createNewParser(); if (parser != null) { madeNew = true; } } /* * now, if we have a parser */ if (parser != null) { try { /* * dump namespace if we are told to. Generally, you want to * do this - you don't in special circumstances, such as * when a VM is getting init()-ed & parsed */ if (dumpNamespace) { dumpVMNamespace(templateName); } ast = parser.parse(reader, templateName); } finally { /* * if this came from the pool, then put back */ if (!madeNew) { parserPool.put(parser); } } } else { error("Runtime : ran out of parsers and unable to create more."); } return(ast); }
public static GettextCatalog ParseFromStream(Stream poStream) { if (poStream == null) throw new ArgumentNullException("poStream"); GettextCatalog catalog = null; try { var lexer = new Scanner(); lexer.SetSource(poStream); var parser = new Parser.Parser(lexer); parser.Parse(); catalog = parser.Catalog; if (catalog == null) goto ret; // another parsing step catalog.ParseHeaders(); // transform all strings into internal UTF-8 representation catalog.ConvertStringsToUtf8(); // parse comments catalog.ParseComments(); // build lookup structures catalog.Finalize(); } catch (Exception e) { throw new GettextException("Parsing exception!", e); } ret: if (catalog == null) throw new GettextException("Couldn't parse the catalog. Check the syntax."); return catalog; }
/// <summary> /// Parse the current document /// </summary> private void ParseNow() { _parser = new Parser.Parser(Sci.Text, Npp.CurrentFile.Path, null, false); _parsedItems = _parser.ParsedItemsList.Where(item => !item.Flags.HasFlag(ParseFlag.FromInclude)).ToNonNullList(); }