public override IEnumerable<Token> BeginFiltering(CompilerContext context, IEnumerable<Token> tokens) {
  foreach (Token token in tokens) {
    if (!token.Term.IsSet(TermOptions.IsBrace)) {
      yield return token;
      continue;
    }
    //Open brace symbol
    if (token.Term.IsSet(TermOptions.IsOpenBrace)) {
      _braces.Push(token);
      yield return token;
      continue;
    }
    //We have a closing brace
    if (_braces.Count == 0) {
      yield return context.CreateErrorTokenAndReportError(token.Span.Start, token.Text,
        "Unmatched closing brace '{0}'", token.Text);
      continue;
    }
    //Check match
    Token last = _braces.Pop();
    if (last.Symbol.IsPairFor != token.Symbol) {
      yield return context.CreateErrorTokenAndReportError(token.Span.Start, token.Text,
        "Unmatched closing brace '{0}' - expected '{1}'", token.Text, last.Symbol.IsPairFor.Name);
      continue;
    }
    //Everything is ok, there's a matching brace on top of the stack
    Token.LinkMatchingBraces(last, token);
    yield return token; //return this token
  }//foreach token
  yield break;
}
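The filter above pairs each closing brace with the most recent open brace kept on a stack. A minimal standalone sketch of the same idea, independent of the CLIrony Token/Terminal types (the BraceCheckDemo class and its Check method are invented for illustration):

using System.Collections.Generic;

static class BraceCheckDemo {
  //Maps each closing brace to its expected opening counterpart.
  static readonly Dictionary<char, char> Pairs =
    new Dictionary<char, char> { { ')', '(' }, { ']', '[' }, { '}', '{' } };

  //Returns null if all braces match, otherwise a short error description.
  public static string Check(string text) {
    var open = new Stack<char>();
    foreach (char c in text) {
      if (c == '(' || c == '[' || c == '{') { open.Push(c); continue; }
      if (!Pairs.ContainsKey(c)) continue;            //not a brace at all
      if (open.Count == 0) return "Unmatched closing brace '" + c + "'";
      char last = open.Pop();
      if (last != Pairs[c])
        return "Unmatched closing brace '" + c + "' - expected closer for '" + last + "'";
    }
    return open.Count == 0 ? null : "Unclosed opening brace '" + open.Peek() + "'";
  }
}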
public override Token TryMatch(CompilerContext context, ISourceStream source) {
  Match m = _expression.Match(source.Text, source.Position);
  if (!m.Success || m.Index != source.Position) return null;
  source.Position += m.Length;
  string text = source.GetLexeme();
  return Token.Create(this, context, source.TokenStart, text);
}
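TryMatch accepts a regex match only if it starts exactly at the current position; a match that begins further down the stream is rejected. A small standalone version of that anchoring check (the identifier pattern is just an example, not the terminal's actual expression):

using System.Text.RegularExpressions;

static class RegexMatchDemo {
  static readonly Regex Identifier = new Regex(@"[A-Za-z_]\w*");

  //Returns the lexeme matched at exactly 'pos', or null if the pattern does not
  //match there (a match further down the string does not count).
  public static string MatchAt(string text, int pos) {
    Match m = Identifier.Match(text, pos);
    return (m.Success && m.Index == pos) ? m.Value : null;
  }
}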
public void AnalyzeCode(AstNode astRoot, CompilerContext context) {
  RunAnalysisPhases(astRoot, context,
    CodeAnalysisPhase.Init, CodeAnalysisPhase.AssignScopes, CodeAnalysisPhase.Allocate,
    CodeAnalysisPhase.Binding, CodeAnalysisPhase.MarkTailCalls, CodeAnalysisPhase.Optimization);
  //sort errors if there are any
  if (context.Errors.Count > 0)
    context.Errors.Sort(SyntaxErrorList.ByLocation);
}
EditorViewAdapterList _viewsCopy; //copy used in refresh loop; set to null when views are added/removed
#endregion Fields

#region Constructors
public EditorAdapter(LanguageCompiler compiler) {
  _compiler = compiler;
  _context = new CompilerContext(_compiler);
  _context.Options |= CompilerOptions.CollectTokens | CompilerOptions.MatchBraces;
  _parsedSource = new ParsedSource(String.Empty, new TokenList(), null);
  _colorizerThread = new Thread(ColorizerLoop);
  _colorizerThread.IsBackground = true;
  _parserThread = new Thread(ParserLoop);
  _parserThread.IsBackground = true;
}
public override Token TryMatch(CompilerContext context, ISourceStream source) {
  char current = source.CurrentChar;
  if (!LineTerminators.Contains(current)) return null;
  //Treat \r\n as a single terminator
  bool doExtraShift = (current == '\r' && source.NextChar == '\n');
  source.Position++; //main shift
  if (doExtraShift)
    source.Position++;
  Token result = Token.Create(this, context, source.TokenStart, source.GetLexeme());
  return result;
}
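The extra shift above collapses a Windows "\r\n" pair into a single line-terminator token. A hypothetical helper showing the same length calculation (the terminator set here is an assumption, not necessarily what a given grammar defines):

static class LineTerminatorDemo {
  const string LineTerminators = "\r\n\u2028\u2029"; //assumed set, for illustration only

  //Returns how many characters the line terminator at position 'pos' occupies
  //(2 for "\r\n", otherwise 1), or 0 if there is no terminator there.
  public static int LineTerminatorLength(string text, int pos) {
    if (pos >= text.Length || LineTerminators.IndexOf(text[pos]) < 0) return 0;
    bool crlf = text[pos] == '\r' && pos + 1 < text.Length && text[pos + 1] == '\n';
    return crlf ? 2 : 1;
  }
}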
public override IEnumerable<Token> BeginFiltering(CompilerContext context, IEnumerable<Token> tokens) {
  _prevLine = 0;
  _indents.Clear();
  foreach (Token token in tokens) {
    if (token.Terminal == Grammar.Eof) {
      //this is necessary, because grammar rules reference the NewLine terminator
      yield return CreateSpecialToken(Grammar.NewLine, context, token.Location);
      //unindent all buffered indents
      if (_trackIndents)
        foreach (int i in _indents)
          yield return CreateSpecialToken(Grammar.Dedent, context, token.Location);
      _indents.Clear();
      //return EOF token
      yield return token;
      yield break;
    }//if Eof

    //Now deal with normal, non-EOF tokens. We intercept only content tokens on new lines.
    if (token.Terminal.Category != TokenCategory.Content || token.Location.Line == _prevLine) {
      yield return token;
      continue;
    }

    //If we are here, we have a content token on a new line; produce a NewLine token and possibly indents
    yield return CreateSpecialToken(Grammar.NewLine, context, token.Location);
    _prevLine = token.Location.Line;
    if (!_trackIndents) {
      yield return token;
      continue;
    }

    //Now take care of indents
    int currIndent = token.Location.Column;
    int prevIndent = _indents.Count == 0 ? 0 : _indents.Peek();
    if (currIndent > prevIndent) {
      _indents.Push(currIndent);
      yield return CreateSpecialToken(Grammar.Indent, context, token.Location);
    } else if (currIndent < prevIndent) {
      //produce one or more Dedent tokens while popping indents from the stack
      while (_indents.Count > 0 && _indents.Peek() > currIndent) {
        _indents.Pop();
        yield return CreateSpecialToken(Grammar.Dedent, context, token.Location);
      }
      if (_indents.Count == 0 || _indents.Peek() != currIndent) {
        yield return context.CreateErrorTokenAndReportError(token.Location, string.Empty,
          "Invalid dedent level, no previous matching indent found.");
        //TODO: add error recovery here
      }
    }//else if currIndent < prevIndent
    yield return token;
  }//foreach token
}
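The indent handling above boils down to a stack of column positions. A self-contained sketch of that bookkeeping, detached from the token plumbing (OutlineDemo and InferIndentTokens are illustrative names; error recovery for mismatched dedents is omitted):

using System.Collections.Generic;

static class OutlineDemo {
  //Given the starting column of each content line, emit the same NEWLINE /
  //INDENT / DEDENT sequence the filter above produces.
  public static IEnumerable<string> InferIndentTokens(IEnumerable<int> lineStartColumns) {
    var indents = new Stack<int>();
    foreach (int column in lineStartColumns) {
      yield return "NEWLINE";
      int prev = indents.Count == 0 ? 0 : indents.Peek();
      if (column > prev) {
        indents.Push(column);
        yield return "INDENT";
      } else {
        while (indents.Count > 0 && indents.Peek() > column) {
          indents.Pop();
          yield return "DEDENT";
        }
      }
    }
    //At end of input, close any indents that are still open, as the Eof branch above does.
    while (indents.Count > 0) {
      indents.Pop();
      yield return "DEDENT";
    }
  }
}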
public override Token TryMatch(CompilerContext context, ISourceStream source) {
  Token result;
  if (context.ScannerState.Value != 0) {
    //We are continuing in line mode - restore internal env (none in this case)
    context.ScannerState.Value = 0;
  } else {
    //We are starting from scratch
    if (!BeginMatch(context, source)) return null;
  }
  result = CompleteMatch(context, source);
  if (result != null) return result;
  //If it is a line comment, it is ok to hit EOF without a final line break; just return everything up to the end.
  if (_isLineComment)
    return Token.Create(this, context, source.TokenStart, source.GetLexeme());
  if (context.Mode == CompileMode.VsLineScan)
    return CreateIncompleteToken(context, source);
  return context.CreateErrorTokenAndReportError(source.TokenStart, string.Empty, "Unclosed comment block");
}
private Token CreateIncompleteToken(CompilerContext context, ISourceStream source) {
  source.Position = source.Text.Length;
  Token result = Token.Create(this, context, source.TokenStart, source.GetLexeme());
  result.Flags |= AstNodeFlags.IsIncomplete;
  context.ScannerState.TokenKind = this.MultilineKind;
  return result;
}
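Together with the VsLineScan branch in TryMatch above, this lets an editor scan one line at a time and resume an unfinished multi-line comment on the next line. A rough standalone illustration of that resumable-state idea (LineState and ScanLine are invented for the sketch; CLIrony stores the equivalent information in ScannerState.TokenKind):

using System;

//Hypothetical resumable scanner state for editor line-by-line colorizing.
enum LineState { Default, InBlockComment }

static class LineScanDemo {
  //Scans a single line and returns the state the next line should start in.
  public static LineState ScanLine(string line, LineState incoming) {
    int pos = 0;
    var state = incoming;
    while (pos < line.Length) {
      if (state == LineState.InBlockComment) {
        int end = line.IndexOf("*/", pos, StringComparison.Ordinal);
        if (end < 0) return LineState.InBlockComment;   //still open; resume on the next line
        pos = end + 2;
        state = LineState.Default;
      } else {
        int start = line.IndexOf("/*", pos, StringComparison.Ordinal);
        if (start < 0) return LineState.Default;
        pos = start + 2;
        state = LineState.InBlockComment;
      }
    }
    return state;
  }
}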
public virtual string GetSyntaxErrorMessage(CompilerContext context, StringSet expectedSymbolSet) {
  return null; //CLIrony would then construct the default message
}
public void Prepare(CompilerContext context, ISourceStream source) {
  _context = context;
  _caseSensitive = context.Compiler.Grammar.CaseSensitive;
  _source = source;
  _currentToken = null;
  _bufferedTokens.Clear();
  if (_source != null)
    ResetSource();
}
private Token CompleteMatch(CompilerContext context, ISourceStream source) {
  //Find the end symbol
  while (!source.EOF()) {
    int firstCharPos;
    if (EndSymbols.Count == 1)
      firstCharPos = source.Text.IndexOf(EndSymbols[0], source.Position);
    else
      firstCharPos = source.Text.IndexOfAny(_endSymbolsFirsts, source.Position);
    if (firstCharPos < 0) {
      source.Position = source.Text.Length;
      return null; //indicating error
    }
    //We found a character that might start an end symbol; let's see if it really does.
    source.Position = firstCharPos;
    foreach (string endSymbol in EndSymbols) {
      if (source.MatchSymbol(endSymbol, !Grammar.CaseSensitive)) {
        //We found the end symbol; eat it only if this is not a line comment.
        //For a line comment, leave the LF character there - it might be important to have a separate LF token.
        if (!_isLineComment)
          source.Position += endSymbol.Length;
        return Token.Create(this, context, source.TokenStart, source.GetLexeme());
      }//if
    }//foreach endSymbol
    source.Position++; //move to the next char and try again
  }//while
  return null; //might happen if we found a starting char of an end symbol, but not the full end symbol
}
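CompleteMatch narrows the search by scanning only for the first characters of the possible end symbols and verifying a full match afterwards. A standalone version of that lookup (FindEndSymbol and its parameters are illustrative; the real code additionally respects case sensitivity):

static class EndSymbolDemo {
  //Returns the index of the earliest occurrence of any end symbol at or after
  //'start', or -1 if none is found; first characters narrow the search.
  public static int FindEndSymbol(string text, int start, string[] endSymbols, out string found) {
    found = null;
    var firsts = new char[endSymbols.Length];
    for (int i = 0; i < endSymbols.Length; i++) firsts[i] = endSymbols[i][0];
    int pos = start;
    while (pos < text.Length) {
      int candidate = text.IndexOfAny(firsts, pos);
      if (candidate < 0) return -1;
      foreach (string symbol in endSymbols) {
        if (string.CompareOrdinal(text, candidate, symbol, 0, symbol.Length) == 0) {
          found = symbol;
          return candidate;
        }
      }
      pos = candidate + 1; //false alarm; keep looking
    }
    return -1;
  }
}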
private Token CreateSpecialToken(Terminal term, CompilerContext context, SourceLocation location) {
  return Token.Create(term, context, location, string.Empty);
}
//Most numbers in source programs are single-digit literals like 0, 1 or 2 (and occasionally other digits up to 9),
//so we try a quick parse for these without starting the whole general process.
protected override Token QuickParse(CompilerContext context, ISourceStream source) {
  if (IsSet(NumberFlags.DisableQuickParse)) return null;
  char current = source.CurrentChar;
  if (char.IsDigit(current) && QuickParseTerminators.IndexOf(source.NextChar) >= 0) {
    int iValue = current - '0';
    object value = null;
    switch (DefaultIntTypes[0]) {
      case TypeCode.Int32:  value = iValue;         break;
      case TypeCode.UInt32: value = (UInt32)iValue; break;
      case TypeCode.Byte:   value = (byte)iValue;   break;
      case TypeCode.SByte:  value = (sbyte)iValue;  break;
      case TypeCode.Int16:  value = (Int16)iValue;  break;
      case TypeCode.UInt16: value = (UInt16)iValue; break;
      default: return null;
    }
    Token token = Token.Create(context, this, source.TokenStart, current.ToString(), value);
    source.Position++;
    return token;
  } else
    return null;
}
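The shortcut above only fires for a single digit that is immediately followed by a character which cannot continue a number. A hypothetical stand-in showing the same check outside the terminal class (the terminator set here is an assumption, not CLIrony's QuickParseTerminators):

static class QuickParseDemo {
  const string Terminators = " \t\r\n,;)]}"; //illustrative set only

  //Parses a lone digit when the next character cannot extend the number.
  public static bool TryQuickParseDigit(string text, int pos, out int value) {
    value = 0;
    if (pos >= text.Length || !char.IsDigit(text[pos])) return false;
    char next = pos + 1 < text.Length ? text[pos + 1] : '\n';
    if (Terminators.IndexOf(next) < 0) return false;
    value = text[pos] - '0';
    return true;
  }
}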
private List<ParserReturn> GetPossibleTokensViaIrony(String Input) {
  List<ParserReturn> tempCompletionList = new List<ParserReturn>();

  #region Set up the autocompletion environment
  _Scanner = GraphCLICompiler.Scanner;
  _CompilerContext = new CompilerContext(GraphCLICompiler);
  #endregion

  #region Get possible tokens
  _SourceFile = new SourceFile(Input, "Source");
  _Scanner.Prepare(_CompilerContext, _SourceFile);
  _CompilerContext.Tokens.Clear();
  _TokenStream = _Scanner.BeginNonDetermisticScan();
  tempCompletionList = GraphCLICompiler.Parser.GetPossibleTokens(_CompilerContext, _TokenStream, Input);
  #endregion

  return tempCompletionList;
}
public override Token TryMatch(CompilerContext context, ISourceStream source) {
  return _handler(this, context, source);
}
private bool BeginMatch(CompilerContext context, ISourceStream source) {
  //Check the starting symbol
  if (!source.MatchSymbol(StartSymbol, !Grammar.CaseSensitive)) return false;
  source.Position += StartSymbol.Length;
  return true;
}
// Override this method in the language grammar if you want a custom node creation mechanism.
public virtual AstNode CreateNode(CompilerContext context, object reduceAction, SourceSpan sourceSpan, AstNodeList childNodes) {
  return null;
}
public void ReadAndExecuteCommand(String InputString) {
  // Read and execute the command

  #region Check if valid command
  //This has to be done via split, because Irony doesn't recognize whitespace,
  //so "dfgfkgdfgkfd" could otherwise be detected as the command "df" with a
  //strange parameter
  if (!IsQuit && ValidCommandFromInputString(InputString)) {
  #endregion

    #region Prepare Command Execution
    _Scanner = GraphCLICompiler.Scanner;
    _CompilerContext = new CompilerContext(GraphCLICompiler);
    _SourceFile = new SourceFile(InputString, "Source");
    _Scanner.Prepare(_CompilerContext, _SourceFile);
    _CompilerContext.Tokens.Clear();
    _TokenStream = _Scanner.BeginNonDetermisticScan();

    AstNode ExecutionTree = null;
    ExecutionTree = GraphCLICompiler.Parser.ParseNonDeterministic(_CompilerContext, _TokenStream);

    #region Check if the valid command is complete
    if (ExecutionTree == null) {
      MarkWrongOption(InputString, GraphCLICompiler.Parser.GetCorrectElements(_CompilerContext, _TokenStream));
    } else {
      //Carry on, the command is valid and complete
    #endregion

      ExtractOptionsFromTree(ExecutionTree);
    #endregion

      if (Commands[CurrentCommand].CLI_Output == CLI_Output.Standard)
        WriteLine();

      #region Handle Command Execution
      //try
      //{
      Stopwatch sw = new Stopwatch();
      sw.Start();

      // TODO: what's this doing here?
      //if (Parameters.Count > 0)
      //{

      #region Execute command...
      if (_GraphDSSharp != null || CurrentCommand.Equals("MKFS") || CurrentCommand.Equals("MOUNT") ||
          CurrentCommand.Equals("QUIT") || CurrentCommand.Equals("EXIT") ||
          CurrentCommand.Equals("USEHISTORY") || CurrentCommand.Equals("SAVEHISTORY")) {

        Commands[CurrentCommand].Execute(_GraphDSSharp, ref CurrentPath, Parameters, InputString);

        //if (CommandCategory.Equals(CLICommandCategory.CLIStandardCommand))
        //{

        #region Handle Quit and History
        switch (CurrentCommand.ToUpper()) {
          case "QUIT":
            IsQuit = true;
            break;
          case "EXIT":
            IsQuit = true;
            break;
          case "USEHISTORY":
            //let's move to the right parameter
            ParameterEnum = Parameters.GetEnumerator();
            ParameterEnum.MoveNext();
            ParameterEnum.MoveNext();
            switch (ParameterEnum.Current.Key) {
              case "default":
                LoadStandardHistory = true;
                if (!HistoryFileName.Length.Equals(0))
                  SaveHistory(HistoryFileName, SthMountedList);
                break;
              default:
                LoadStandardHistory = false;
                HistoryFileName = ParameterEnum.Current.Key;
                LoadHistoryFrom(HistoryFileName);
                break;
            }
            break;
          case "SAVEHISTORY":
            //let's move to the right parameter
            ParameterEnum = Parameters.GetEnumerator();
            ParameterEnum.MoveNext();
            ParameterEnum.MoveNext();
            if (LoadStandardHistory)
              SaveHistory(ParameterEnum.Current.Key, NothingMountedList);
            else
              SaveHistory(ParameterEnum.Current.Key, SthMountedList);
            break;
        }
        #endregion

        //}
      } else
        WriteLine("Nothing mounted...");
      #endregion

      //}//CommandArray.Length > 0 ?

      sw.Stop();

      if (Parameters.Count > 0 && Commands[CurrentCommand].CLI_Output != CLI_Output.Short) {
        WriteLine("Command took {0}ms, {1:0.0} MB RAM, {2:0.0}% CPU",
          sw.ElapsedMilliseconds, _RAMCounter.NextValue() / 1024 / 1024, _CPUCounter.NextValue());
      }

      //}
      //catch (Exception e)
      //{
      //  WriteLine("Uuups... " + e.Message);
      //  WriteLine("StackTrace... " + e.StackTrace);
      //}

      Reset();
      #endregion
    }
  }
}
//This method is called if the Scanner failed to produce a token
public virtual Token TryMatch(CompilerContext context, ISourceStream source) {
  return null;
}
public CodeAnalysisArgs(CompilerContext context) {
  Context = context;
  Phase = CodeAnalysisPhase.Init;
}
private void RunAnalysisPhases(AstNode astRoot, CompilerContext context, params CodeAnalysisPhase[] phases) {
  CodeAnalysisArgs args = new CodeAnalysisArgs(context);
  foreach (CodeAnalysisPhase phase in phases) {
    switch (phase) {
      case CodeAnalysisPhase.AssignScopes:
        astRoot.Scope = new Scope(astRoot, null);
        break;
      case CodeAnalysisPhase.MarkTailCalls:
        if (!Grammar.FlagIsSet(LanguageFlags.TailRecursive))
          continue; //foreach loop - don't run the phase
        astRoot.Flags |= AstNodeFlags.IsTail;
        break;
    }//switch
    args.Phase = phase;
    astRoot.OnCodeAnalysis(args);
  }//foreach phase
}
protected override void InitDetails(CompilerContext context, CompoundTokenDetails details) {
  base.InitDetails(context, details);
  details.Flags = (int)this.Flags;
}
public AstNode Parse(CompilerContext context, SourceFile source) {
  Scanner.Prepare(context, source);
  context.Tokens.Clear();
  //If we need to match braces, then we need to ensure that we have a BraceMatchFilter
  if (context.OptionIsSet(CompilerOptions.MatchBraces))
    EnsureBraceMatchFilter();
  IEnumerable<Token> tokenStream = Scanner.BeginScan();
  //chain all token filters
  foreach (TokenFilter filter in Grammar.TokenFilters) {
    tokenStream = filter.BeginFiltering(context, tokenStream);
  }
  //finally, the parser takes the token stream and produces the root AST node
  int start = Environment.TickCount;
  AstNode rootNode = Parser.Parse(context, tokenStream);
  _compileTime = Environment.TickCount - start;
  if (context.Errors.Count > 0)
    context.Errors.Sort(SyntaxErrorList.ByLocation);
  if (rootNode != null && context.OptionIsSet(CompilerOptions.AnalyzeCode))
    AnalyzeCode(rootNode, context);
  return rootNode;
}
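A hedged usage sketch of this pipeline from the caller's side, pieced together from the types that appear in these snippets; MyGrammar and the LanguageCompiler constructor signature are assumptions, so adjust them to the actual CLIrony API:

using System;

static class CompileDemo {
  //Hypothetical driver: the LanguageCompiler constructor and MyGrammar are assumed.
  static void Run() {
    var compiler = new LanguageCompiler(new MyGrammar());
    var context = new CompilerContext(compiler);
    context.Options |= CompilerOptions.MatchBraces | CompilerOptions.AnalyzeCode;
    var source = new SourceFile("x = 1 + 2", "Source");
    AstNode root = compiler.Parse(context, source);
    if (root == null || context.Errors.Count > 0)
      foreach (var error in context.Errors)
        Console.WriteLine(error); //errors are already sorted by location in Parse
  }
}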
//Used in unit tests
public static CompilerContext CreateDummy() {
  CompilerContext ctx = new CompilerContext(LanguageCompiler.CreateDummy());
  return ctx;
}