public void ParseNext_CharacterEscapedHexValue()
        {
            var target = LexicalGrammar.GetParser();
            var source = new StringCharacterSequence("'\\x41'");

            target.Parse(source).Value.Should().Match(TokenType.Character, "'\\x41'");
        }
        public void ParseNext_UnknownBacktick()
        {
            var target = LexicalGrammar.GetParser();
            var source = new StringCharacterSequence("`");

            target.Parse(source).Value.Should().Match(TokenType.Unknown, "`");
        }
        public void ParseNext_EOF()
        {
            var target = LexicalGrammar.GetParser();
            var source = new StringCharacterSequence("");

            target.Parse(source).Value.Type.Should().Be(TokenType.EndOfInput);
        }
        public void ParseNext_WhitespaceOperator()
        {
            var target = LexicalGrammar.GetParser();
            var source = new StringCharacterSequence("  + ");

            target.Parse(source).Value.Should().Match(TokenType.Operator, "+");
        }
Example #5
        public TranspileResult TranspileFile(string inputFileName, string outputFileName = null)
        {
            CompilationUnitNode ast;

            using (var source = new StreamCharacterSequence(inputFileName, Encoding.UTF8))
            {
                var lexicalGrammar = LexicalGrammar.GetParser();
                var tokenizer      = lexicalGrammar
                                     .ToSequence(source)
                                     .Where(t => t.Value.Type != TokenType.Comment)
                                     .Select(r => r.Value);
                var result = _grammar.CompilationUnits.Parse(tokenizer);
                ast          = result.Value;
                ast.FileName = inputFileName;
            }
            var validateResults = ast.Validate();

            if (validateResults.Any())
            {
                return TranspileResult.ForValidationFailure(validateResults);
            }

            outputFileName = outputFileName ?? inputFileName + ".cs";
            using (var outStream = new StreamWriter(outputFileName, false))
            {
                var preamble = Formatting.GetGeneratedFilePreamble("Scoop", inputFileName);
                outStream.WriteLine(preamble);
                new CSharpTranspileVisitor(outStream).Visit(ast);
                outStream.Flush();
            }

            return TranspileResult.ForSuccess(outputFileName);
        }
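A minimal call-site sketch for TranspileFile above. The containing type is not shown in the example, so ScoopTranspiler and the file name Program.sc are hypothetical stand-ins.

        public static void TranspileFileExample()
        {
            // ScoopTranspiler is a placeholder for whatever class hosts TranspileFile
            // and owns the _grammar field used above.
            var transpiler = new ScoopTranspiler();

            // With no output name supplied, the method writes to "Program.sc.cs".
            var result = transpiler.TranspileFile("Program.sc");
        }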
Example #6
        /// <summary>
        /// Convenience method to handle parsing a string into an AstNode.
        /// </summary>
        /// <typeparam name="TOutput">The type of value produced by the parser.</typeparam>
        /// <param name="parser">The token parser to run over the tokenized string.</param>
        /// <param name="s">The source text to tokenize and parse.</param>
        /// <returns>The parsed output value.</returns>
        public static TOutput Parse<TOutput>(this IParser<Token, TOutput> parser, string s)
        {
            var tokenizer = LexicalGrammar.GetParser()
                            .ToSequence(new StringCharacterSequence(s))
                            .Select(r => r.Value);

            return parser.Parse(tokenizer).Value;
        }
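A hypothetical call site for the Parse&lt;TOutput&gt; extension above; the expressionParser parameter and the AstNode result type are assumptions standing in for any token-based parser from the grammar.

        public static void ParseStringExample(IParser<Token, AstNode> expressionParser)
        {
            // The extension tokenizes the string with LexicalGrammar and runs the
            // supplied parser over the resulting token sequence.
            AstNode node = expressionParser.Parse("1 + 2");
        }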
        public void ParseNext_IntegerDivideInteger()
        {
            var target = LexicalGrammar.GetParser();
            var source = new StringCharacterSequence(@"1/2");

            target.Parse(source).Value.Should().Match(TokenType.Integer, "1");
            target.Parse(source).Value.Should().Match(TokenType.Operator, "/");
            target.Parse(source).Value.Should().Match(TokenType.Integer, "2");
        }
        public void ParseNext_Word()
        {
            var target = LexicalGrammar.GetParser();
            var source = new StringCharacterSequence("test class @class");

            target.Parse(source).Value.Should().Match(TokenType.Word, "test");
            target.Parse(source).Value.Should().Match(TokenType.Word, "class");
            target.Parse(source).Value.Should().Match(TokenType.Word, "@class");
        }
Example #9
        /// <summary>
        /// Creates a new <see cref="KustoCode"/> instance from the text and globals and performs semantic analysis.
        /// </summary>
        /// <param name="text">The code text</param>
        /// <param name="globals">The globals to use for parsing and semantic analysis. Defaults to <see cref="GlobalState.Default"/></param>.
        /// <param name="cancellationToken">A <see cref="CancellationToken"/> that can be used to cancel parsing and semantic analysis.</param>
        public static KustoCode ParseAndAnalyze(string text, GlobalState globals = null, CancellationToken cancellationToken = default(CancellationToken))
        {
            if (text == null)
            {
                throw new ArgumentNullException(nameof(text));
            }

            var tokens = LexicalGrammar.GetTokens(text, alwaysProduceEndToken: true);

            return Create(text, globals ?? GlobalState.Default, tokens, analyze: true, cancellationToken: cancellationToken);
        }
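A short usage sketch for ParseAndAnalyze above; the query text is arbitrary, and GlobalState.Default is used because no globals are passed.

        public static void ParseAndAnalyzeExample()
        {
            // Parses the query and runs semantic analysis against the default globals.
            KustoCode code = KustoCode.ParseAndAnalyze("StormEvents | take 10");
        }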
        public void ParseNext_ClassDefinition()
        {
            var target = LexicalGrammar.GetParser();
            var source = new StringCharacterSequence(@"public class MyClass { }");

            target.Parse(source).Value.Should().Match(TokenType.Word, "public");
            target.Parse(source).Value.Should().Match(TokenType.Word, "class");
            target.Parse(source).Value.Should().Match(TokenType.Word, "MyClass");
            target.Parse(source).Value.Should().Match(TokenType.Operator, "{");
            target.Parse(source).Value.Should().Match(TokenType.Operator, "}");
        }
        public void ParseNext_MultiLineComment1()
        {
            var target = LexicalGrammar.GetParser();
            var source = new StringCharacterSequence(@"
                /* This is a comment */
");
            var result = target.Parse(source).Value;

            result.Type.Should().Be(TokenType.EndOfInput);
            result.Frontmatter[1].Should().Be("/* This is a comment */");
        }
        public void ParseNext_SingleLineComment()
        {
            //Assert.Inconclusive("Comments aren't returned from the lexer right now");
            var target = LexicalGrammar.GetParser();
            var source = new StringCharacterSequence(@"
                // This is a comment
");
            var result = target.Parse(source).Value;

            result.Type.Should().Be(TokenType.EndOfInput);
            result.Frontmatter[1].Should().Be("// This is a comment");
        }
        public INode Parse(string expr)
        {
            var lexer  = LexicalGrammar.CreateParser();
            var chars  = expr.ToCharacterSequence();
            var tokens = lexer
                         .ToSequence(chars)
                         .Select(r => r.Value)
                         .Where(t => t.Type != ValueType.Whitespace);
            var parser = SExpressionGrammar.CreateParser();
            var result = parser.Parse(tokens);

            return result.Value;
        }
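An illustrative call to the S-expression Parse method above; the input string is made up, and the returned INode is not inspected further.

        public void ParseSExpressionExample()
        {
            // Lexes the text, drops whitespace tokens, and builds the expression tree.
            INode root = Parse("(+ 1 2)");
        }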
        public void ParseNext_MethodDefinition()
        {
            var target = LexicalGrammar.GetParser();
            var source = new StringCharacterSequence(@"public void MyMethod() { }");

            target.Parse(source).Value.Should().Match(TokenType.Word, "public");
            target.Parse(source).Value.Should().Match(TokenType.Word, "void");
            target.Parse(source).Value.Should().Match(TokenType.Word, "MyMethod");
            target.Parse(source).Value.Should().Match(TokenType.Operator, "(");
            target.Parse(source).Value.Should().Match(TokenType.Operator, ")");
            target.Parse(source).Value.Should().Match(TokenType.Operator, "{");
            target.Parse(source).Value.Should().Match(TokenType.Operator, "}");
        }
Example #15
        /// <summary>
        /// Determines the code kind from the text. See <see cref="CodeKinds"/>.
        /// </summary>
        public static string GetKind(string text)
        {
            var token = LexicalGrammar.GetFirstToken(text);

            if (token != null)
            {
                if (token.Kind == SyntaxKind.DotToken)
                {
                    return CodeKinds.Command;
                }
                else if (token.Kind == SyntaxKind.DirectiveToken)
                {
                    return CodeKinds.Directive;
                }
            }

            return CodeKinds.Query;
        }
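A sketch of the three outcomes of GetKind above; the sample inputs are assumptions chosen to start with a dot token, a directive token, or neither.

        public static void GetKindExample()
        {
            var commandKind   = GetKind(".show tables");        // CodeKinds.Command
            var directiveKind = GetKind("#connect cluster");    // assumed to lex as a directive token
            var queryKind     = GetKind("StormEvents | count"); // CodeKinds.Query
        }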
Example #16
        public void ParseNext_CSharpLiteral_Simple()
        {
            var source = new StringCharacterSequence(@"c# {obj.Method();}");

            LexicalGrammar.GetParser().Parse(source).Value.Should().Match(TokenType.CSharpLiteral, "obj.Method();");
        }
Example #17
        public static IObservable<IDictionary<string, object>> KustoQuery(this IObservable<IDictionary<string, object>> source, string query)
        {
            var kq = KustoCode.Parse(query);
            IDictionary<string, object> letValues = new Dictionary<string, object>();

            if (kq.Syntax.GetDescendants<Statement>().Count > 1)
            {
                var statementList = kq.Syntax.GetDescendants<Statement>().ToList();

                var letStatements  = statementList.Where(x => x.Kind == SyntaxKind.LetStatement);
                var queryStatement = statementList.FirstOrDefault(x => x.Kind == SyntaxKind.ExpressionStatement);
            }

            var lexicalTokens = LexicalGrammar.GetTokens(query, alwaysProduceEndToken: true);

            string[] pipeline = SplitExpressions(lexicalTokens).ToArray();
            var      result   = source;

            foreach (string p in pipeline)
            {
                string stage = p.Trim();
                int    index = stage.IndexOf(' ');
                string op    = stage.Substring(0, index);
                string args  = stage.Substring(index + 1);

                switch (op)
                {
                case "where":
                    result = result.Where(args);
                    break;

                case "limit":
                    result = result.Take(int.Parse(args));
                    break;

                case "project":
                    result = result.ProjectExpressions(args);
                    break;

                case "project-away":
                    result = result.ProjectAwayExpressions(args);
                    break;

                case "project-keep":
                    result = result.ProjectKeepExpressions(args);
                    break;

                case "evaluate":
                    result = result.Evaluate(args);
                    break;

                case "extend":
                    result = result.Extend(args);
                    break;

                case "summarize":
                    result = result.Summarize(args);
                    break;

                default:
                    throw new NotImplementedException($"KustoQuery observable does not implement the operator: {op}");
                }
            }

            return result;
        }
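Illustrative only: wiring the KustoQuery operator above onto an observable of dictionary rows. The source stream and the query text are invented for this sketch.

        public static IObservable<IDictionary<string, object>> KustoQueryExample(
            IObservable<IDictionary<string, object>> rows)
        {
            // Each piped stage maps onto one case of the switch statement above.
            return rows.KustoQuery("where Level == 'Error' | project Message | limit 10");
        }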
Example #18
        public void ParseNext_CSharpLiteral_Braces()
        {
            var source = new StringCharacterSequence(@"c# {if(x==2){y();}}");

            LexicalGrammar.GetParser().Parse(source).Value.Should().Match(TokenType.CSharpLiteral, "if(x==2){y();}");
        }
Example #19
        public void ParseNext_CSharpLiteral_StringBrace()
        {
            var source = new StringCharacterSequence(@"c# {""quo}ted""}");

            LexicalGrammar.GetParser().Parse(source).Value.Should().Match(TokenType.CSharpLiteral, "\"quo}ted\"");
        }
Example #20
        public void ParseNext_CSharpLiteral_AtStringBackslashEscapedQuote()
        {
            var source = new StringCharacterSequence(@"c# {@""quo\""""ted""}");

            LexicalGrammar.GetParser().Parse(source).Value.Should().Match(TokenType.CSharpLiteral, "@\"quo\\\"\"ted\"");
        }
Example #21
        public void ParseNext_CSharpLiteral_Character()
        {
            var source = new StringCharacterSequence(@"c# {'x'}");

            LexicalGrammar.GetParser().Parse(source).Value.Should().Match(TokenType.CSharpLiteral, "'x'");
        }