Exemplo n.º 1
0
        /// <summary>
        /// Parses a Gherkin feature file into a <see cref="Feature"/> instance.
        /// </summary>
        /// <param name="featureFile">Path of the feature file to parse.</param>
        /// <returns>The parsed feature with its descriptions post-processed.</returns>
        /// <exception cref="InvalidOperationException">Thrown when the document contains no feature.</exception>
        private Feature ParseFeatureFile(string featureFile)
        {
            using (var reader = new StreamReader(featureFile))
            {
                var parser          = new Parser();
                var dialectProvider = new AugurkDialectProvider(Options.Language);
                var tokenScanner    = new TokenScanner(reader);
                var tokenMatcher    = new TokenMatcher(dialectProvider);
                var document        = parser.Parse(tokenScanner, tokenMatcher);
                if (document.Feature == null)
                {
                    throw new InvalidOperationException("Feature file failed to parse.");
                }

                var feature = document.Feature.ConvertToFeature(dialectProvider.GetDialect(document.Feature.Language, document.Feature.Location));
                feature.SourceFilename = featureFile;

                // Temporarily switch the process-wide current directory to the feature
                // file's directory so ProcessDescription can resolve relative paths.
                // Restore it in a finally block — the original code skipped the reset
                // when ProcessDescription threw, leaving the whole process in the
                // wrong directory.
                string savedDirectory = Directory.GetCurrentDirectory();
                Directory.SetCurrentDirectory(Path.GetDirectoryName(featureFile));
                try
                {
                    feature.Description = ProcessDescription(feature.Description);
                    foreach (var scenario in feature.Scenarios)
                    {
                        scenario.Description = ProcessDescription(scenario.Description);
                    }
                }
                finally
                {
                    // Always restore the saved directory, even on error.
                    Directory.SetCurrentDirectory(savedDirectory);
                }

                return(feature);
            }
        }
    /// <summary>
    /// Walks every token scanned from the given formatted lines and asks the
    /// symbol resolver to (re)resolve the syntax-tree node each token links to.
    /// (Removed a large block of commented-out dead code that duplicated older
    /// per-token-kind resolution logic.)
    /// </summary>
    /// <param name="lsFormatedLine">The formatted lines whose tokens should be resolved.</param>
    public static void SysmbolResolve(FormatedLine[] lsFormatedLine)
    {
        var kScanner = new TokenScanner(lsFormatedLine);

        while (kScanner.MoveNext())
        {
            // NOTE(review): assumes every scanned token has a non-null m_kLinkedLeaf;
            // a token without a linked leaf would throw here — confirm against the scanner.
            SymbolResolver.ResolveNode(kScanner.Current.m_kLinkedLeaf.Parent);
        }
    }
    /// <summary>
    /// Appends the scanner's current token to this node as a leaf, reusing the
    /// child already sitting at the insertion slot when possible.
    /// </summary>
    /// <param name="pTokenScanner">Scanner positioned at the token to add.</param>
    /// <returns>The reused or newly created leaf node.</returns>
    public SyntaxTreeNode_Leaf AddToken(TokenScanner pTokenScanner)
    {
        // Try to recycle the node already occupying the next valid slot.
        if (NumValidNodes < nodes.Count)
        {
            var candidate = nodes[NumValidNodes] as SyntaxTreeNode_Leaf;
            if (candidate != null && candidate.TryReuse(pTokenScanner))
            {
                ++NumValidNodes;
                return candidate;
            }
        }

        var newLeaf = new SyntaxTreeNode_Leaf(pTokenScanner)
        {
            Parent = this, m_iChildIndex = NumValidNodes
        };

        if (NumValidNodes == nodes.Count)
        {
            // Appending at the end: no sibling indices need shifting.
            nodes.Add(newLeaf);
            ++NumValidNodes;
        }
        else
        {
            // Inserting in the middle: every sibling after the new leaf moves one
            // slot to the right, so bump each of their child indices.
            var insertAt = NumValidNodes;
            nodes.Insert(insertAt, newLeaf);
            NumValidNodes = insertAt + 1;
            for (var i = insertAt + 1; i < nodes.Count; ++i)
            {
                ++nodes[i].m_iChildIndex;
            }
        }

        return newLeaf;
    }
Exemplo n.º 4
0
        /// <summary>
        /// Scan for the next token in the input stream.
        /// </summary>
        /// <returns>The next token in the input stream</returns>
        Token Scan()
        {
            Token result = null;

            // Pull input and skip whitespace until something remains to scan.
            do
            {
                Fetch();
            } while (EatWhite());
            if (buffer == null)
            {
                // Fetch() exhausted the input: report end-of-text.
                result = new TokenScanner(State, TokenEOT, Position);
            }
            else
            {
                // Delegate actual token recognition to the overridable Scan(ref).
                Scan(ref result);
            }
            if (result == null)
            {
                if (ScanNewlines && (buffer[column] == NEWLINE))
                {
                    // NOTE(review): a newline is reported with TokenEOT, the same id
                    // used for end-of-text above — confirm callers rely on that.
                    result = new TokenScanner(State, TokenEOT, Position);
                    column++;
                }
                else
                {
                    // Nothing matched at the current column: emit an error token and
                    // move the scanner into its error state.
                    result = new TokenScanner(State, TokenError, Position);
                    State  = StateError;
                }
            }
            return(result);
        }
Exemplo n.º 5
0
    /// <summary>
    /// Builds a syntax tree by applying parse steps until the grammar is
    /// exhausted or a step reports failure.
    /// </summary>
    /// <returns>The completed tree, or null when the scanner has no tokens.</returns>
    public LR_SyntaxTree Build()
    {
        if (!TokenScanner.MoveNext())
        {
            return null;
        }

        var tree = new LR_SyntaxTree();

        SyntaxRule_Cur = tree.root = new SyntaxTreeNode_Rule(Parser.ParseRoot.RootID);
        ParseNode_Cur  = Parser.ParseRoot.Rule_Start.Parse(this);

        // Initialise the error-recovery checkpoint at the starting position.
        SyntaxRule_Err = SyntaxRule_Cur;
        ParseNode_Err  = ParseNode_Cur;

        // Keep stepping until the parser runs out of nodes or a step fails.
        while (ParseNode_Cur != null && ParseStep())
        {
        }

        return tree;
    }
Exemplo n.º 6
0
        /// <summary>
        /// Parses a single Gherkin feature file and extracts its steps.
        /// </summary>
        /// <param name="file">Path of the feature file to parse.</param>
        /// <returns>
        /// A <see cref="FeatureFileInfo"/> for the file, or null when parsing failed
        /// (the failure is recorded in <c>_errorFiles</c>).
        /// </returns>
        public static FeatureFileInfo ParseFile(string file)
        {
            var     parser = new Parser <object>();
            Feature feature;

            using (var reader = new StreamReader(file)) {
                var ts = new TokenScanner(reader);
                try {
                    feature = (Feature)parser.Parse(ts);
                } catch (CompositeParserException ex) {
                    _errorFiles.Add(new ErrorInfo {
                        FilePath           = file,
                        CompositeException = ex
                    });
                    return(null);
                } catch (NullReferenceException ex) {
                    // NOTE(review): catching NullReferenceException is normally an
                    // anti-pattern; kept deliberately so a malformed file is recorded
                    // as an error instead of aborting the whole run.
                    _errorFiles.Add(new ErrorInfo {
                        FilePath  = file,
                        Exception = ex
                    });
                    return(null);
                }
                // (Removed a dead "ts = null;" assignment — the local goes out of
                // scope at the end of this block anyway.)
            }
            // Flatten every scenario's steps, expanding example-driven steps.
            var steps = (from scenario in feature.ScenarioDefinitions
                         from step in scenario.Steps
                         let exampleSteps = GetSteps(step, scenario)
                                            from s in exampleSteps
                                            select s).ToList();
            var featureInfo = new FeatureFileInfo(file, steps);

            return(featureInfo);
        }
Exemplo n.º 7
0
 /// <summary>
 /// Look at the next token in the input stream without consuming it.
 /// </summary>
 /// <returns>The next token in the input stream.</returns>
 public Token PeekToken()
 {
     // Scan lazily and cache the result so repeated peeks return the same token.
     return lookahead ?? (lookahead = (TokenScanner)Scan());
 }
Exemplo n.º 8
0
    /// <summary>
    /// Resolves the symbol of every token scanned from the given formatted lines
    /// by passing each token's linked syntax-tree node to the symbol resolver.
    /// </summary>
    /// <param name="lsFormatedLine">Formatted lines whose tokens should be resolved.</param>
    public static void SysmbolResolve(FormatedLine[] lsFormatedLine)
    {
        for (var scanner = new TokenScanner(lsFormatedLine); scanner.MoveNext();)
        {
            SymbolResolver.ResolveNode(scanner.Current.m_kLinkedLeaf.Parent);
        }
    }
Exemplo n.º 9
0
        /// <summary>
        /// Tries to match the given regex at the current column; on success emits a
        /// token of the given id and advances past the matched text.
        /// </summary>
        /// <param name="sysmbol">Token id to report on a successful match.</param>
        /// <param name="re">Regex to try at the current position; its "text" group is the token value.</param>
        /// <param name="output">Receives the recognised token when the match succeeds.</param>
        /// <returns>True when the regex matched; otherwise false.</returns>
        bool CheckSymbol(TokenId sysmbol, Regex re, ref Token output)
        {
            Match match = re.Match(buffer, column);
            if (!match.Success)
            {
                return false;
            }

            string text = match.Groups["text"].Value;
            output  = new TokenScanner(State, sysmbol, Position, text);
            column += text.Length;
            return true;
        }
        /// <summary>
        /// Verifies that a "--" (C++-style) comment is tokenised as LEX_COMMENT and
        /// that the token stream ends right after it.
        /// </summary>
        public void TokenScanner_CppStyleComment()
        {
            string inputQuery   = @"EVALUATE T -- Comment";
            var    tokenScanner = new TokenScanner(inputQuery, 0);
            var    enumerator   = tokenScanner.GetEnumerator();

            foreach (var expected in new[] {
                Tokens.KWEVALUATE,
                Tokens.LEX_WHITE,
                Tokens.TABLENAME,
                Tokens.LEX_WHITE,
                Tokens.LEX_COMMENT,
            })
            {
                VerifyNextToken(enumerator, expected);
            }

            //? Why there is nothing after the comment?
            Assert.IsFalse(enumerator.MoveNext());
        }
Exemplo n.º 11
0
        /// <summary>
        /// Consume the next token in the input stream, preferring a previously
        /// peeked (buffered) token over a fresh scan.
        /// </summary>
        /// <returns>The next token in the input stream.</returns>
        public Token GetToken()
        {
            if (lookahead == null)
            {
                return Scan();
            }

            // Hand out the buffered token and clear the buffer.
            var buffered = lookahead;
            lookahead = null;
            return buffered;
        }
Exemplo n.º 12
0
    /// <summary>
    /// Wires this builder to a token scanner and the compilation-unit scope:
    /// resets the scanner, installs the builder's move callback, and gives the
    /// scanner its end-of-file sentinel token.
    /// </summary>
    /// <param name="tokenScanner">Scanner that supplies the tokens to build from.</param>
    /// <param name="pComplilationUnitScop">Scope of the compilation unit being built.</param>
    public SyntaxTreeBuilder_CSharp(TokenScanner tokenScanner, Scope_CompilationUnit pComplilationUnitScop)
    {
        ComplilationUnitScop = pComplilationUnitScop;
        m_kTokenScanner      = tokenScanner;

        tokenScanner.Reset();
        tokenScanner.OnTokenMoveAt = OnTokanMoveAt;

        // Sentinel token the scanner reports at end of input.
        var eof = new LexerToken(LexerToken.Kind.EOF, string.Empty);
        eof.tokenId      = m_kParser.tokenEOF;
        tokenScanner.EOF = eof;
    }
    /// <summary>
    /// Attempts to reuse this leaf for the scanner's current token. Succeeds only
    /// when the current token is already linked to this leaf.
    /// </summary>
    /// <param name="pTokenScanner">Scanner positioned at the candidate token.</param>
    /// <returns>True when the leaf was reused; otherwise false.</returns>
    public bool TryReuse(TokenScanner pTokenScanner)
    {
        // Short-circuit keeps the original order: the scanner's current token is
        // only examined when a stored token exists.
        if (token == null || pTokenScanner.Current.m_kLinkedLeaf != this)
        {
            return false;
        }

        // Re-link the stored token to this leaf and report success.
        token.m_kLinkedLeaf = this;
        return true;
    }
        /// <summary>
        /// Verifies that a one-line C-style comment is tokenised as LEX_COMMENT and
        /// that exactly one more token follows before the stream ends.
        /// </summary>
        public void TokenScanner_CStyleCommentOneLine()
        {
            string inputQuery   = @"EVALUATE T /* */";
            var    tokenScanner = new TokenScanner(inputQuery, 0);
            var    enumerator   = tokenScanner.GetEnumerator();

            foreach (var expected in new[] {
                Tokens.KWEVALUATE,
                Tokens.LEX_WHITE,
                Tokens.TABLENAME,
                Tokens.LEX_WHITE,
                Tokens.LEX_COMMENT,
            })
            {
                VerifyNextToken(enumerator, expected);
            }

            Assert.IsTrue(enumerator.MoveNext());
            Assert.IsFalse(enumerator.MoveNext());
        }
        /// <summary>
        /// Compiles the given input stream: tokenises it, parses it into an
        /// expression tree, then runs a layout pass followed by a write pass.
        /// Errors are reported through <paramref name="log"/>.
        /// </summary>
        /// <param name="input">Positionable source stream to compile.</param>
        /// <param name="output">Writer that receives the compiled output.</param>
        /// <param name="log">Sink for errors produced during compilation.</param>
        public void Compile(IPositionableInputStream input, TextWriter output, ILog log)
        {
            this.log = log;

            // Fresh per-compilation state.
            this.offsetHistory    = new Stack <int> ();
            this.protectedRegions = new List <Tuple <int, int> > ();

            this.scopeStructures = new Dictionary <IExpression <int>, ScopeStructure <int> > ();

            TokenScanner tokenScanner = new TokenScanner(input);

            // Empty input: nothing to compile.
            if (!tokenScanner.MoveNext())
            {
                return;
            }

            Match <Token>     match;
            IExpression <int> expression = parser.Parse(tokenScanner, out match);

            if (!match.Success)
            {
                log.AddError(match.Error);
                return;
            }

            // The parser should have consumed everything up to end-of-stream.
            // NOTE(review): the sibling Assemble() method treats IsAtEnd itself as an
            // error and checks the two conditions separately — confirm which end
            // condition is intended here.
            if (!tokenScanner.IsAtEnd && tokenScanner.Current.Type != TokenType.EndOfStream)
            {
                AddNotReachedEnd(tokenScanner.Current);
                return;
            }

            // Pass 1: compute layout/offsets; only runs when parsing was clean.
            if (log.ErrorCount == 0)
            {
                this.currentOffset = 0;
                // DeclareExternASMCLabels(ExecuteLayoutPass<TextWriter> (expression, null,output), output);
                ExecuteLayoutPass <TextWriter>(expression, null, output);
            }

            // Pass 2: emit output, only if the layout pass produced no errors.
            if (log.ErrorCount == 0)
            {
                this.currentOffset = 0;
                ExecuteWritePass(output, expression, null);
            }
        }
Exemplo n.º 16
0
        /// <summary>
        /// Parses the given feature file with the supplied Berp-generated Gherkin
        /// parser and, in debug builds, prints the re-formatted feature text.
        /// </summary>
        /// <param name="parser">Parser instance used for the test run.</param>
        /// <param name="file">Path of the feature file to parse.</param>
        private static void BerpGherkinParserTest(Parser parser, string file)
        {
            Feature feature;

            using (StreamReader reader = new StreamReader(file))
            {
                var scanner = new TokenScanner(new FastSourceReader(reader));
                //var scanner = new TokenScanner(new DefaultSourceReader(reader));
                feature = (Feature)parser.Parse(scanner);
            }

#if DEBUG
            //Console.WriteLine(File.ReadAllText(file));
            //Console.WriteLine("--------------");

            // Round-trip the parsed feature back to text for manual inspection.
            var formatter = new FeatureFormatter();
            Console.WriteLine(formatter.GetFeatureText(feature));
#endif
        }
        /// <summary>
        /// Verifies the exact token sequence produced for a one-line EVALUATE query,
        /// including punctuation tokens reported as character casts.
        /// </summary>
        public void TokenScanner_OneLine()
        {
            string inputQuery   = @"EVALUATE ROW(""a"", 1)";
            var    tokenScanner = new TokenScanner(inputQuery, 0);
            var    enumerator   = tokenScanner.GetEnumerator();

            foreach (var expected in new[] {
                Tokens.KWEVALUATE,
                Tokens.LEX_WHITE,
                Tokens.FUNCTION,
                (Tokens)'(',
                Tokens.STRING,
                (Tokens)',',
                Tokens.LEX_WHITE,
                Tokens.NUMBER,
                (Tokens)')',
            })
            {
                VerifyNextToken(enumerator, expected);
            }

            Assert.IsTrue(enumerator.MoveNext());
            Assert.IsFalse(enumerator.MoveNext());
        }
Exemplo n.º 18
0
        /// <summary>
        /// Recognises the next token at the current column, dispatching on the
        /// scanner state.
        /// </summary>
        /// <param name="result">Receives the recognised token, or an error token.</param>
        protected override void Scan(ref Token result)
        {
            switch (State)
            {
            case States.NORMAL:
                // Single-character symbols first, then regex-matched INT/NAME.
                switch (buffer[column])
                {
                case '[': SetSymbol(TokenId.BRACKET_OPEN, ref result); break;

                case ']': SetSymbol(TokenId.BRACKET_CLOSE, ref result); break;

                case '(': SetSymbol(TokenId.PARENTESES_OPEN, ref result); break;

                case ')': SetSymbol(TokenId.PARENTESES_CLOSE, ref result); break;

                case '^': SetSymbol(TokenId.PWR, ref result); break;

                case '/': SetSymbol(TokenId.DIV, ref result); break;

                case '#': SetSymbol(TokenId.NAME, ref result, "#"); break;

                default:
                    // Try a number first; if that fails, fall back to a name.
                    // (result stays null when neither matches — the caller treats
                    // that as an error.)
                    if (CheckSymbol(TokenId.INT, INT, ref result))
                    {
                        break;
                    }
                    CheckSymbol(TokenId.NAME, NAME, ref result);
                    break;
                }
                break;

            case States.ERROR:
                // Once in the error state every scan yields an error token.
                result = new TokenScanner(State, TokenId._ERROR_, Position, "Scanner no token recognised.");
                break;

            default:
                // Unknown state: report it and force the scanner into ERROR.
                result = new TokenScanner(State, TokenId._ERROR_, Position, $"Scanner state not recognised: {State}");
                SetState((int)States.ERROR);
                break;
            }
        }
Exemplo n.º 19
0
        /// <summary>
        /// Assembles the input stream into binary output in two passes: the first
        /// pass collects code data, the second emits it. Errors are reported
        /// through <paramref name="log"/>.
        /// </summary>
        /// <param name="input">Positionable source stream to assemble.</param>
        /// <param name="output">Binary writer receiving the assembled bytes.</param>
        /// <param name="log">Sink for errors found during assembly.</param>
        public void Assemble(IPositionableInputStream input, BinaryWriter output, ILog log)
        {
            var assemblyContext = new AssemblyContext <T>(log, output);

            var scanner = new TokenScanner(input);

            // Empty input: nothing to assemble.
            if (!scanner.MoveNext())
            {
                return;
            }

            Match <Token> match;
            var           tree = parser.Parse(scanner, out match);

            if (!match.Success)
            {
                log.AddError(match.Error);// + " " + inputStream.PeekOriginalLine()
                return;
            }

            // The parser must stop exactly on the end-of-stream token: consuming
            // everything, or stopping early, are both errors.
            if (scanner.IsAtEnd)
            {
                log.AddError("Consumed all input. Shouldn't have happened.");
                return;
            }

            if (scanner.Current.Type != TokenType.EndOfStream)
            {
                log.AddError(scanner.Current.Position + ": Didn't reach end, currently at " + scanner.Current);
                return;
            }
            // Pass 1: collect code/data fragments starting at the current stream offset.
            assemblyContext.CurrentOffset = (int)output.BaseStream.Position;
            foreach (var item in FirstPass(tree, assemblyContext))
            {
                assemblyContext.AddCodeData(item.code, item.offset, item.template);
            }
            // Pass 2: re-walk the tree from the same base offset and emit the bytes.
            assemblyContext.CurrentOffset = (int)output.BaseStream.Position;
            SecondPass(tree, assemblyContext);
        }
 /// <summary>
 /// Creates a leaf for the scanner's current token and links that token back to
 /// this leaf (the link is a deliberate constructor side effect).
 /// </summary>
 /// <param name="pTokenScanner">Scanner positioned at the token to wrap.</param>
 public SyntaxTreeNode_Leaf(TokenScanner pTokenScanner)
 {
     var current = pTokenScanner.Current;
     current.m_kLinkedLeaf = this;
     token = current;
 }
Exemplo n.º 21
0
 /// <summary>
 /// Emits a token of the given id at the current position and advances the
 /// column past the symbol's textual representation.
 /// </summary>
 /// <param name="symbol">Token id to report.</param>
 /// <param name="output">Receives the emitted token.</param>
 /// <param name="value">Optional token value; null when the id alone suffices.</param>
 void SetSymbol(TokenId symbol, ref Token output, string value = null)
 {
     output = new TokenScanner(State, symbol, Position, value);
     var symbolText = symbol.Text(as_symbol: true);
     column += symbolText.Length;
 }
Exemplo n.º 22
0
    /// <summary>
    /// Performs one parsing step: advances the grammar until the scanner moves or
    /// an error is raised, then runs error recovery (skipping the offending token
    /// or inserting a missing one) when needed.
    /// </summary>
    /// <returns>False when parsing is finished (or hit EOF during an error); true otherwise.</returns>
    public bool ParseStep()
    {
        if (ParseNode_Cur == null)
        {
            return(false);
        }

        var token = TokenScanner.Current;

        if (ErrorMessage == null)
        {
            // Apply grammar rules until either the scanner consumes the current
            // token (position changes) or an error message is raised.
            while (ParseNode_Cur != null)
            {
                ParseNode_Cur = ParseNode_Cur.Parse(this);
                if (ErrorMessage != null || token != TokenScanner.Current)
                {
                    break;
                }
            }

            if (ErrorMessage == null && token != TokenScanner.Current)
            {
                // Clean progress: remember this position as the recovery checkpoint.
                SyntaxRule_Err = SyntaxRule_Cur;
                ParseNode_Err  = ParseNode_Cur;
            }
        }
        if (ErrorMessage != null)
        {
            // --- error recovery ---
            if (token.tokenKind == LexerToken.Kind.EOF)
            {
                return(false);
            }

            // Keep the failing position in case recovery decides to insert a
            // missing token there.
            var missingParseTreeNode = SyntaxRule_Cur;
            var missingParseNode     = ParseNode_Cur;

            // Rewind to the last known-good checkpoint.
            SyntaxRule_Cur = SyntaxRule_Err;
            ParseNode_Cur  = ParseNode_Err;
            if (SyntaxRule_Cur != null)
            {
                // Invalidate trailing children that carry no leafs — they were
                // produced while parsing past the error.
                var cpt = SyntaxRule_Cur;
                for (var i = cpt.NumValidNodes; i > 0 && !cpt.ChildAt(--i).HasLeafs();)
                {
                    cpt.InvalidateFrom(i);
                }
            }

            if (ParseNode_Cur != null)
            {
                int numSkipped;
                ParseNode_Cur = ParseNode_Cur.Recover(this, out numSkipped);
            }
            if (ParseNode_Cur == null)
            {
                // Recovery failed: skip the offending token, attaching it to a
                // fresh leaf that records the syntax error.
                if (token.m_kLinkedLeaf != null)
                {
                    token.m_kLinkedLeaf.ReparseToken();
                }
                // Constructor side effect: links the scanner's current token
                // (token.m_kLinkedLeaf) to the newly created leaf.
                new SyntaxTreeNode_Leaf(TokenScanner);

                if (cachedErrorParseNode == ParseNode_Err)
                {
                    // Same expected set as last time — reuse the cached message
                    // instead of rebuilding the FirstSet string.
                    token.m_kLinkedLeaf.m_sSyntaxError = cachedErrorMessage;
                }
                else
                {
                    token.m_kLinkedLeaf.m_sSyntaxError = "Unexpected token! Expected " + ParseNode_Err.FirstSet.ToString(Parser.ParseRoot);
                    cachedErrorMessage   = token.m_kLinkedLeaf.m_sSyntaxError;
                    cachedErrorParseNode = ParseNode_Err;
                }

                // Resume from the checkpoint with the next token.
                ParseNode_Cur  = ParseNode_Err;
                SyntaxRule_Cur = SyntaxRule_Err;

                if (!TokenScanner.MoveNext())
                {
                    return(false);
                }
                ErrorMessage = null;
            }
            else
            {
                // Recovery found a continuation point: insert the token the
                // grammar was missing and resume after it.
                if (missingParseNode != null && missingParseTreeNode != null)
                {
                    SyntaxRule_Cur = missingParseTreeNode;
                    ParseNode_Cur  = missingParseNode;
                }

                InsertMissingToken(ErrorMessage ?? ("Expected " + missingParseNode.FirstSet.ToString(Parser.ParseRoot)));

                if (missingParseNode != null && missingParseTreeNode != null)
                {
                    ErrorMessage   = null;
                    ErrorToken     = null;
                    SyntaxRule_Cur = missingParseTreeNode;
                    ParseNode_Cur  = missingParseNode;
                    ParseNode_Cur  = missingParseNode.parent.NextAfterChild(missingParseNode, this);
                }
                ErrorMessage = null;
                ErrorToken   = null;
            }
        }

        return(true);
    }
Exemplo n.º 23
0
        // multiline scan with nesting.
        /// <summary>
        /// Scans a (possibly multi-line) token delimited by <paramref name="delimiter_open"/>
        /// and <paramref name="delimiter_close"/>, honouring nested open/close pairs.
        /// </summary>
        /// <param name="id">Token id to report for the scanned region.</param>
        /// <param name="delimiter_open">Opening delimiter; null/empty disables nesting.</param>
        /// <param name="delimiter_close">Closing delimiter; null/empty yields an empty token.</param>
        /// <param name="ignoreCase">Whether delimiter matching is case-insensitive.</param>
        /// <returns>The scanned token, or an error token when EOT is hit before the close.</returns>
        protected Token ScanMultiline(object id, string delimiter_open, string delimiter_close, bool ignoreCase = false)
        {
            Token result = null;

            if (string.IsNullOrEmpty(delimiter_close))
            {
                // No closing delimiter to look for: report an empty token here.
                result = new TokenScanner(State, id, Position);
            }
            else
            {
                if (string.IsNullOrEmpty(delimiter_open))
                {
                    delimiter_open = null;
                }

                var nests = 0;
                var p     = Position;
                var value = new StringBuilder();
                var start = column;
                do
                {
                    if (buffer.Length <= column)
                    {
                        // End of the current line: flush the unconsumed tail and
                        // fetch the next line.
                        if (start < column)
                        {
                            value.Append(buffer.Substring(start));
                        }
                        if (!IncludeNewlines)
                        {
                            // NOTE(review): a newline is appended when IncludeNewlines is
                            // FALSE — looks inverted, but may mean "lines arrive without
                            // their newline, so add one". Confirm against Fetch().
                            value.Append(NEWLINE);
                        }
                        column = buffer.Length;
                        Fetch();
                        // Bug fix: restart the unflushed region at the new position.
                        // The original kept the stale 'start' from the previous line,
                        // which could make the final flush length negative and drop
                        // the token's last line.
                        start = column;
                        if (buffer == null)
                        {
                            result = new TokenScanner(State, TokenError, p, $"Multiline token: In '{id}'...; found EOT before a closing delimiter '{delimiter_close}'");
                        }
                    }
                    else if (
                        (delimiter_open != null) &&
                        (
                            !ignoreCase && (buffer[column] == delimiter_open[0]) ||
                            ignoreCase && (char.ToUpperInvariant(buffer[column]) == char.ToUpperInvariant(delimiter_open[0]))
                        ) &&
                        (delimiter_open.Length <= buffer.Length - column) &&
                        (string.Compare(buffer.Substring(column, delimiter_open.Length), delimiter_open, ignoreCase) == 0)
                        )
                    {
                        // Nested opening delimiter: track depth and skip over it
                        // (the delimiter text stays part of the token value).
                        nests++;
                        column += delimiter_open.Length;
                    }
                    else if (
                        (
                            !ignoreCase && (buffer[column] == delimiter_close[0]) ||
                            ignoreCase && (char.ToUpperInvariant(buffer[column]) == char.ToUpperInvariant(delimiter_close[0]))
                        ) &&
                        (delimiter_close.Length <= buffer.Length - column) &&
                        (string.Compare(buffer.Substring(column, delimiter_close.Length), delimiter_close, ignoreCase) == 0)
                        )
                    {
                        if (0 < nests)
                        {
                            // Closing a nested level: skip the CLOSE delimiter.
                            // Bug fix: the original advanced by delimiter_open.Length
                            // here, mis-positioning the scanner whenever the two
                            // delimiters differ in length.
                            nests--;
                            column += delimiter_close.Length;
                        }
                        else
                        {
                            // Outermost close: flush the tail and build the token.
                            var l = column - start;
                            if (0 < l)
                            {
                                value.Append(buffer.Substring(start, l));
                            }
                            column += delimiter_close.Length;
                            result  = new TokenScanner(State, id, p, value.ToString());
                        }
                    }
                    else
                    {
                        column++;
                    }
                }while (result == null);
            }
            return(result);
        }