/// <summary>
/// Records a syntax error message and, when a user-defined handler is registered,
/// delegates the decision whether to continue parsing to it.
/// </summary>
/// <param name="inputToken">Token at which the syntax error occurred.</param>
/// <param name="lazyMsg">Deferred builder for the error message (only evaluated when reporting happens).</param>
/// <returns>
/// The user handler's verdict (true = keep going), or true when no handler is set,
/// reporting is suppressed, or the handler itself threw.
/// </returns>
protected bool callUserErrorHandler(ITokenMatch <SYMBOL_ENUM> inputToken, Func <string> lazyMsg)
{
    // Suppress repeated reports during error recovery: only report when the parse is still
    // considered successful or enough consecutive correct actions have been seen since the last error.
    if (IsSuccessfulParse || consecutiveCorrectActionsCount >= ConsecutiveCorrectActionsLimit)
    {
        addErrorMessage(lazyMsg());

        if (SyntaxErrorAction != null)
        {
            try
            {
                // Expose the error location to the handler via the parser's current coordinates.
                Coordinates = inputToken.Coordinates;
                var user_messages = new List <string>();
                bool user_result = SyntaxErrorAction(inputToken, user_messages);
                user_messages.ForEach(s => addErrorMessage(s));
                return (user_result);
            }
            catch (Exception ex)
            {
                // FIX: corrected typo in the reported message ("Expection" -> "Exception").
                addErrorMessage("Exception was thrown in user defined syntax error handler: " + ex.Message);
            }
        }
    }

    // Default: continue parsing.
    return (true);
}
/// <summary>
/// Interactive REPL: reads chemical formulas from the console, scans and parses each one,
/// and prints either the resulting <c>Element</c> or the accumulated error messages.
/// </summary>
/// <param name="lexer">Lexer used to tokenize each input line.</param>
/// <param name="parser">Parser producing the AST root from the token stream.</param>
/// <param name="reporting">When true, enables parser tracing and prints the parse log.</param>
private static void run(Lexer <SymbolEnum, StateEnum> lexer, Parser <SymbolEnum, object> parser, bool reporting)
{
    while (true)
    {
        Console.Write("Ready -- enter chemical formula or press [Enter] to quit: ");
        string line = Console.ReadLine();
        // FIX: Console.ReadLine() returns null at end-of-stream (e.g. redirected/closed stdin).
        // The original compared against String.Empty only, so a null line would be
        // passed on to lexer.ScanText. Treat null the same as an empty line: quit.
        if (string.IsNullOrEmpty(line))
        {
            break;
        }

        IEnumerable <ITokenMatch <SymbolEnum> > tokens = lexer.ScanText(line);
        ITokenMatch <SymbolEnum> err_token = tokens.FirstOrDefault(it => it.Token == SymbolEnum.Error);
        if (err_token != null)
        {
            Console.WriteLine("Incorrect symbol in input: " + err_token.Text);
        }
        else
        {
            object root = parser.Parse(tokens, new ParserOptions() { Trace = reporting }).FirstOrDefault();

            if (!parser.IsSuccessfulParse)
            {
                Console.WriteLine("There were errors while parsing.");
                foreach (var s in parser.ErrorMessages)
                {
                    Console.WriteLine(s);
                }
            }
            else
            {
                var elem = (Element)root;
                Console.WriteLine("The outcome: " + elem.ToString());
                Console.WriteLine();
            }

            if (reporting)
            {
                foreach (var s in parser.ParseLog)
                {
                    Console.WriteLine(s + Environment.NewLine);
                }
            }
        }

        Console.WriteLine();
    }
}
/// <summary>
/// Syntax-error callback: records a located error message and stops the parse
/// once more than 50 errors have accumulated.
/// </summary>
/// <param name="cur_token">Token at (or near) which the error was detected.</param>
/// <param name="errors">Handler-provided message list (unused here; errors go to customErrors).</param>
/// <returns>True to continue parsing, false once the error limit is exceeded.</returns>
private bool SyntaxError(ITokenMatch <int> cur_token, List <string> errors)
{
    SymbolPosition position = cur_token.Coordinates.FirstPosition;
    customErrors.Add($"\"{filename}\", at ({position.Line},{position.Column}): syntax error at or near {cur_token.Text}");

    ++omerrs;
    if (omerrs > 50)
    {
        customErrors.Add("More than 50 errors");
        return false;
    }

    return true;
}
/// <summary>
/// Scans and parses a textual regular expression into an <see cref="AltRegex"/> tree.
/// </summary>
/// <param name="strRegex">Regex source text.</param>
/// <returns>The parsed regex tree.</returns>
/// <exception cref="ParseControlException">Thrown on scanning or parsing failure.</exception>
public AltRegex GetRegex(string strRegex)
{
    IEnumerable <ITokenMatch <SymbolEnum> > scanned = lexer.ScanText(strRegex);

    // Scanning failure -- report, including the lexer's message when one is attached.
    ITokenMatch <SymbolEnum> error = scanned.FirstOrDefault(it => it.Token == SymbolEnum.Error);
    if (error != null)
    {
        string detail = "";
        if (error.ErrorMessage != null)
        {
            detail = "(" + error.ErrorMessage + ")";
        }
        throw ParseControlException.NewAndRun("Invalid regex input " + detail + ": " + strRegex.EscapedString());
    }

    // Parsing failure -- the first parse result is either the tree or not an AltRegex at all.
    var result = parser.Parse(scanned, new ParserOptions()).FirstOrDefault() as AltRegex;
    if (result == null)
    {
        throw ParseControlException.NewAndRun("Invalid regex syntax: " + strRegex.EscapedString());
    }

    return result;
}
/// <summary>
/// Scans and parses the given text; on success returns null and hands the AST back
/// through <paramref name="ast"/>, otherwise returns a human-readable error string.
/// </summary>
/// <param name="text">Input text to scan and parse.</param>
/// <param name="reporting">When true, enables parser tracing and echoes the parse log to the console.</param>
/// <param name="ast">Receives the parsed AST on success; null on failure.</param>
/// <returns>Null on success, otherwise an error description.</returns>
private string read(string text, bool reporting, out Dictionary <string, object> ast)
{
    ast = null;

    IEnumerable <ITokenMatch <TokenEnum> > scanned = lexer.ScanText(text);
    ITokenMatch <TokenEnum> bad_token = scanned.FirstOrDefault(it => it.Token == TokenEnum.Error);
    if (bad_token != null)
    {
        return "Incorrect symbol in input: " + bad_token.Text;
    }

    object root = parser.Parse(scanned, new ParserOptions() { Trace = reporting }).FirstOrDefault();

    // Dump the trace before deciding success/failure, exactly like the original flow.
    if (reporting)
    {
        foreach (var entry in parser.ParseLog)
        {
            Console.WriteLine(entry + Environment.NewLine);
        }
    }

    if (!parser.IsSuccessfulParse)
    {
        return "There were errors while parsing." + Environment.NewLine + String.Join(Environment.NewLine, parser.ErrorMessages);
    }

    ast = (Dictionary <string, object>)root;
    return null;
}
/// <summary>
/// Performs a shift: pushes the current input token onto the parse stack,
/// optionally recording the action in the trace log.
/// </summary>
/// <param name="findAction">Recovery status of the action; Recovered marks the pushed element accordingly.</param>
private void makeShift(ActionRecoveryEnum findAction)
{
    if (options.Trace)
    {
        parseLog.Last.Value.Shifted = ParseHistory.Shift;
    }

    // The input is NOT advanced here -- a fork may still happen after this shift.
    ITokenMatch <SYMBOL_ENUM> head = stackMaster.InputHead;
    bool was_recovered = findAction == ActionRecoveryEnum.Recovered;

    // Anchoring both ends of the attach point to the last stack element keeps
    // the shift compatible with the anchor produced by a reduction.
    StackElement <SYMBOL_ENUM, TREE_NODE> pushed = addToStack(
        Command.Shifted <SYMBOL_ENUM, TREE_NODE>(),
        attachPoint: new AttachPoint <SYMBOL_ENUM, TREE_NODE>(lastOfStackOrNull, lastOfStackOrNull),
        symbol: head.Token,
        markWith: Productions.NoMark,
        text: head.Text,
        userObject: new Option <object>(head.Value),
        recovered: was_recovered);

    if (options.Trace)
    {
        parseLog.Last.Value.Shifted += "[" + pushed.ForkId + "]";
    }
}
/// <summary>
/// Factory hook: wraps a lexer match into a <see cref="Token"/>. Override to produce custom token types.
/// </summary>
/// <param name="match">The matched lexeme to convert.</param>
/// <returns>A new token carrying the match's type and value.</returns>
protected virtual IToken CreateToken(ITokenMatch match) => new Token(match.TokenType, match.Value);
/// <summary>
/// Looks up the single history entry whose token list contains the given token.
/// </summary>
/// <param name="token">Token to search the scan history for.</param>
/// <returns>The matching history entry; throws if none or more than one matches.</returns>
public MatchInfo FindMatchInfo(ITokenMatch <SYMBOL_ENUM> token)
    => history.Single(entry => entry.Tokens.Any(t => t.Equals(token)));
// Executes the next queued command: either shifts the current input token onto the
// stack, or reduces by the command's NFA cell (computing coordinates, running the
// production's user action, and pushing the LHS symbol).
// Returns false when a user action signals a hard stop (exception, or
// ParseControlException with ContinueOnError == false); true otherwise.
private bool process()
{
    Command <SYMBOL_ENUM, TREE_NODE> command = commands.First.Value;
    NfaCell <SYMBOL_ENUM, TREE_NODE> reduce_item = command.ReduceItem;
    commands.RemoveFirst();

    if (reduce_item == null) // shift
    {
        // Shift: push the input head onto the stack and advance the input.
        ITokenMatch <SYMBOL_ENUM> input_head = tokens.Head;
        addToStack(advanceInput: true,
                   symbol: input_head.Token,
                   userObject: new Option <object>(input_head.Value),
                   coordinates: input_head.Coordinates);
    }
    else
    {
        // Reduce.
        // here we have to use RhsSeenCount from state, not from production to use the same code as in parsers
        List <StackElement <SYMBOL_ENUM, TREE_NODE> > stack_tail = stack.RemoveLast(reduce_item.RhsSeenCount).ToList();

        // Compute the coordinates of the reduced symbol. "Exact" coordinates come from
        // real tokens; when none are available we fall back to neighbouring positions
        // and mark the result as inexact.
        bool is_exact = true;
        SymbolPosition first_position;
        SymbolPosition last_position;

        {
            // First position: first exact coordinate within the popped RHS, else the end of
            // the last exact element still on the stack, else the initial coordinates.
            SymbolCoordinates first_coords = stack_tail.Select(it => it.Coordinates).FirstOrDefault(it => it.IsExact);
            if (first_coords != null)
            {
                first_position = first_coords.FirstPosition;
            }
            else
            {
                is_exact = false;
                first_coords = stack.Select(it => it.Coordinates).LastOrDefault(it => it.IsExact);
                if (first_coords != null)
                {
                    first_position = first_coords.LastPosition;
                }
                else
                {
                    first_position = initCoordinates.FirstPosition;
                }
            }
        }

        {
            // Last position: last exact coordinate within the popped RHS, else the start
            // of the current input token.
            SymbolCoordinates last_coords = stack_tail.Select(it => it.Coordinates).LastOrDefault(it => it.IsExact);
            if (last_coords != null)
            {
                last_position = last_coords.LastPosition;
            }
            else
            {
                last_position = tokens.Head.Coordinates.FirstPosition;
            }
        }

        var coordinates = new SymbolCoordinates(is_exact, first_position, last_position);
        positionUpdate(coordinates);

        // no value for user object -- for 2 reasons:
        // * in case of exception we have to pass no value further
        // * if stack already contains no value we have to pass no value again
        var user_object = new Option <object>();

        // [@PARSER_USER_ACTION]
        // this is shortcut -- instead of passing function that returns nulls (when bulding parser)
        // we set user actions to null and here we can handle it
        if (reduce_item.ProductionUserAction == null)
        {
            // return null as user object (it is a valid value)
            user_object = new Option <object>(null);
        }
        else if (stack_tail.All(it => it.UserObject.HasValue))
        {
            // Run the production's user action only when every RHS element carries a value.
            try
            {
                object value = reduce_item.ProductionUserAction.Code(stack_tail.Select(it => it.UserObject.Value).ToArray());
                if (value is RichParseControl)
                {
                    // RichParseControl wraps the real value together with warnings to report.
                    addWarningMessages(coordinates.FirstPosition.ToString(), (value as RichParseControl).Warnings);
                    user_object = new Option <object>((value as RichParseControl).Value);
                }
                else
                {
                    user_object = new Option <object>(value);
                }
            }
            catch (ParseControlException ex)
            {
                // Controlled failure raised by the user action; may allow the parse to continue.
                addErrorMessage(coordinates.FirstPosition.ToString() + ": " + ex.Message);
                if (!ex.ContinueOnError)
                {
                    return (false);
                }
            }
            catch (Exception ex)
            {
                // Unexpected user-action failure: report and abort the parse.
                addErrorMessage("User action error in production : " + reduce_item.ProductionCoordinates
                                + Environment.NewLine + ex.Message + Environment.NewLine + ex.ToString());
                addErrorMessage(ex.StackTrace);
                return (false);
            }
        }

        // Push the LHS of the production; input is not advanced on a reduce.
        addToStack(advanceInput: false,
                   symbol: reduce_item.LhsSymbol,
                   userObject: user_object,
                   coordinates: coordinates);
    }

    return (true);
}
/// <summary>
/// Thin pass-through to the underlying lexer's history lookup for the given token.
/// </summary>
/// <param name="token">Token to locate in the lexer's scan history.</param>
/// <returns>The lexer's match-info record for that token.</returns>
internal Lexer <int, int> .MatchInfo FindMatchInfo(ITokenMatch <int> token) => lexer.FindMatchInfo(token);
// Scans and parses a grammar file, printing diagnostics to the console along the way.
// Returns the parsed Grammar, or null on lexer/parser creation failure, scanning error,
// or parse error.
private static Grammar parse(string filename, GenOptions genOptions, ParserOptions options)
{
    var lexer = GenLexer.Create(genOptions.Bootstrap);
    var parser = GenParser.Create(genOptions.Bootstrap);
    if (lexer == null || parser == null)
    {
        return (null);
    }

    IEnumerable <ITokenMatch <int> > tokens = lexer.ScanFile(filename);
    ITokenMatch <int> err_token = tokens.FirstOrDefault(it => it.Token == lexer.ErrorToken);

    // Scanner reports are written when explicitly requested OR when scanning failed
    // (the report helps diagnose the error).
    if (genOptions.ReportScanning || err_token != null)
    {
        Console.WriteLine(lexer.WriteReports("report_"));
    }

    if (err_token != null)
    {
        // Dump everything known about the failed match: location, lexer state,
        // offending text, attached message, and the last few preceding matches.
        var err_info = lexer.FindMatchInfo(err_token);
        Console.WriteLine("Error token");
        Console.WriteLine("Scanning error, id: " + err_token.ID
                          + " at " + err_token.Coordinates.FirstPosition.ToString()
                          + " state: " + err_info.StateTransStr(lexer.StatesRep)
                          + ", starting with:");
        Console.WriteLine(err_token.Text.EscapedString());
        Console.WriteLine("(length: " + err_token.Text.Length + ")");
        Console.WriteLine("Message: " + err_token.Value);
        // Show the 10 matches immediately preceding the error for context.
        Console.WriteLine("Context of the error: " + lexer.History.TakeWhile(it => it != err_info).TakeTail(10)
                          .Select(it => it.ToString(lexer.SymbolsRep, lexer.StatesRep)).Join(Environment.NewLine));
        return (null);
    }

    Grammar grammar = parser.Parse(filename, tokens, options);
    if (grammar == null)
    {
        Console.WriteLine("Parse error, more in parsing_history.txt.");
    }

    // Parse-history report: full trace when tracing was enabled, otherwise a hint
    // telling the user how to enable it.
    if (genOptions.ReportOther || grammar == null)
    {
        if (options.Trace)
        {
            System.IO.File.WriteAllLines("report_parsing_history.out.txt", parser.ParseHistory);
        }
        else
        {
            System.IO.File.WriteAllLines("report_parsing_history.out.txt", new[] { "Pass 'trace' option in order to trace parsing." });
        }
    }

    if (grammar == null)
    {
        foreach (var s in parser.ErrorMessages())
        {
            Console.WriteLine(s);
        }
    }

    // Informational messages are printed even on success.
    foreach (var s in parser.NonErrorMessages())
    {
        Console.WriteLine("[info] " + s);
    }

    return (grammar);
}