Example #1
        public void line_of_position()
        {
            var nl = new[] { 2, 5, 7, 11, 13, 17, 19, 23 };
            var r  = new LexerResult <int>("", new LexerToken <int> [0], nl, false);

            var l = 1;
            var c = 1;

            for (var p = 0; p < 25; ++p)
            {
                r.LineOfPosition(p, out var line, out var column);
                Assert.AreEqual(l, line, $"line at {p}");
                Assert.AreEqual(c, column, $"column at {p}");

                if (nl.Contains(p))
                {
                    l += 1;
                    c  = 1;
                }
                else
                {
                    c += 1;
                }
            }
        }
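The mapping asserted above can be computed directly from the sorted newline offsets. As a hypothetical sketch (not LexerResult's actual implementation), a helper reproducing the same 1-based line/column convention might look like this:

        private static void ExpectedLineOfPosition(int[] newlines, int position, out int line, out int column)
        {
            // Count the newlines that occur strictly before `position`; each one starts a new line.
            var idx    = Array.BinarySearch(newlines, position);
            var before = idx >= 0 ? idx : ~idx;

            line   = before + 1;
            column = before == 0 ? position + 1 : position - newlines[before - 1];
        }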
Example #2
        private void InternalRun(string code)
        {
            // Convert code to tokens
            LexerResult <TokenType> result = lexer.Tokenize(code);

            // Error out if lexer fails
            if (result.IsError)
            {
                WriteLineColor($"Parsing Error: {result.Error}", Red);
                return;
            }

            // Get tokens
            TokenList tokens = result.Tokens;

            // Build tree
            InterpreterState state = new InterpreterState();

            InterpreterState.Init(ref state);
            RootNode root = BuildTree <RootNode>(new Queue <Token <TokenType> >(tokens), ref state);

#if DEBUG
            root.Dump();
#endif

            // Run
            root.Run(ref state);
        }
Example #3
        private bool SkipScope(int leftToken, int rightToken)
        {
            if (_current.Token != leftToken)
            {
                throw new ArgumentException("Invalid use of SkipBlock method, current token should equal left token parameter");
            }

            var scope = new Stack <int>();

            scope.Push(1);

            while (_current.Token != Token.Eof)
            {
                _current = _lexer.GetToken();

                if (_current.Token == leftToken)
                {
                    scope.Push(1);
                }

                if (_current.Token == rightToken)
                {
                    scope.Pop();
                }

                if (scope.Count == 0)
                {
                    return(true);
                }
            }

            return(false);
        }
Example #4
        private List <RegionResult> GetRegionBlocks()
        {
            var result = new List <RegionResult>();

            while (true)
            {
                RegionResult region;
                switch (_current.Token)
                {
                case Token.Eof: return(result);

                case Token.Do:     // do-while has two blocks
                    region   = ParseBlock();
                    _current = _lexer.GetToken();
                    if (_current.Token == Token.While)
                    {
                        var whileRegion = ParseBlock();
                        var doWhile     = region.Combine(whileRegion);
                        result.Add(doWhile);
                        _current = _lexer.GetToken();
                        break;
                    }
                    result.Add(region);
                    break;

                default:
                    region = ParseBlock();
                    result.Add(region);
                    _current = _lexer.GetToken();
                    break;
                }
            }
        }
Example #5
        public void Parser_Place_ExtraParameters_OK()
        {
            // Arrange
            var lexerResult = new LexerResult
            {
                CommandToken    = "PLACE",
                ParameterTokens = new[]
                {
                    "1",
                    "2",
                    "eASt",
                    "FOO"
                }
            };
            var textWriter = new StringWriter();

            // Act
            var command = CommandInterpreter.Parser(lexerResult, textWriter);

            // Assert
            Assert.NotNull(command);
            Assert.IsType <PlaceCommand>(command);
            Assert.Equal(1, ((PlaceCommand)command).X);
            Assert.Equal(2, ((PlaceCommand)command).Y);
            Assert.Equal(CompassPoint.East, ((PlaceCommand)command).Heading);
        }
Example #6
        public void get_string_pos()
        {
            var tokens = new[]
            {
                new LexerToken <int>(0, 0, 3),
                new LexerToken <int>(0, 4, 1),
                new LexerToken <int>(0, 5, 0),
                new LexerToken <int>(0, 8, 3),
                new LexerToken <int>(0, 11, 0)
            };

            var r = new LexerResult <int>(
                buffer: "foo +\n  bar",
                tokens: tokens,
                newlines: new[] { 5 },
                hasInvalidTokens: true);

            Assert.AreEqual("foo", r.GetStringPos(0).Value);
            Assert.AreEqual(new SourceSpan(new SourceLocation(0, 1, 1), 3), r.GetStringPos(0).Location);

            Assert.AreEqual("+", r.GetStringPos(1).Value);
            Assert.AreEqual(new SourceSpan(new SourceLocation(4, 1, 5), 1), r.GetStringPos(1).Location);

            Assert.AreEqual("", r.GetStringPos(2).Value);
            Assert.AreEqual(new SourceSpan(new SourceLocation(5, 1, 6), 0), r.GetStringPos(2).Location);

            Assert.AreEqual("bar", r.GetStringPos(3).Value);
            Assert.AreEqual(new SourceSpan(new SourceLocation(8, 2, 3), 3), r.GetStringPos(3).Location);

            Assert.AreEqual("", r.GetStringPos(4).Value);
            Assert.AreEqual(new SourceSpan(new SourceLocation(11, 2, 6), 0), r.GetStringPos(4).Location);
        }
Example #7
        public void Parser_TextWriterNull_ThrowsArgumentNullException()
        {
            // Arrange
            var lexerResult = new LexerResult();

            // Act & Assert
            Assert.Throws <ArgumentNullException>("textWriter", () => CommandInterpreter.Parser(lexerResult, null));
        }
Example #8
        /// <summary>
        /// Set the lexer for this file. This should only be run internally, by the lexer itself.
        /// </summary>
        /// <param name="result">The result of the lexer</param>
        public void SetLexer(LexerResult result)
        {
            if (_lexer != null)
            {
                // TODO: fix these exceptions as custom errors
                throw new ArgumentException("Attempted to set the lexer multiple times");
            }

            _lexer = result;
        }
Example #9
        private RegionResult ParseBlock()
        {
            var start = _current.Start;

            // The region is a braced block when the first token is a left curly bracket.
            bool block = _current.Token == Token.LeftBracket;

            while (_current.Token != Token.Eof)
            {
                _current = _lexer.GetToken();

                if ((!block && _current.Token == Token.SemiColon) ||
                    (block && _current.Token == Token.RightBracket) ||
                    _current.Token == Token.Eof)
                {
                    return(new RegionResult
                    {
                        Offset = start,
                        Length = _current.End - start
                    });
                }

                if (_current.Token == Token.LeftParenthese)
                {
                    var isComplete = SkipScope(Token.LeftParenthese, Token.RightParenthese);
                    if (_current.Token == Token.Eof)
                    {
                        return(new RegionResult
                        {
                            Offset = start,
                            Length = _current.End - start,
                            IsCompleteBlock = isComplete
                        });
                    }

                    continue;
                }

                if (_current.Token == Token.LeftBracket)
                {
                    bool isComplete = SkipScope(Token.LeftBracket, Token.RightBracket);
                    return(new RegionResult
                    {
                        Offset = start,
                        Length = _current.End - start,
                        IsCompleteBlock = isComplete
                    });
                }
            }

            throw new InvalidOperationException(string.Format("{0} should never reach this point.", typeof(RegionParser).Name));
        }
Example #10
        public void Parser_LexerResultCommandTokenEmpty_ThrowsArgumentException()
        {
            // Arrange
            var lexerResult = new LexerResult
            {
                CommandToken    = string.Empty,
                ParameterTokens = new string[0]
            };
            var textWriter = new StringWriter();

            // Act & Assert
            Assert.Throws <ArgumentException>("lexerResult", () => CommandInterpreter.Parser(lexerResult, textWriter));
        }
Example #11
        public void Parser_LexerResultParametersNull_ThrowsArgumentException()
        {
            // Arrange
            var lexerResult = new LexerResult
            {
                CommandToken    = "LEFT",
                ParameterTokens = null
            };
            var textWriter = new StringWriter();

            // Act & Assert
            Assert.Throws <ArgumentException>("lexerResult", () => CommandInterpreter.Parser(lexerResult, textWriter));
        }
Example #12
        public void Parser_UnknownCommandToken_ThrowsException()
        {
            // Arrange
            var lexerResult = new LexerResult
            {
                CommandToken    = "FOO",
                ParameterTokens = new string[0]
            };
            var textWriter = new StringWriter();

            // Act & Assert
            Assert.Throws <Exception>(() => CommandInterpreter.Parser(lexerResult, textWriter));
        }
Example #13
        /// <summary>
        /// Receives a LexerResult and parses it asynchronously; the order of Index is checked.
        /// </summary>
        /// <param name="lexerResult">The Result of Lexer</param>
        public async Task ParseLexUnitAsync(LexerResult lexerResult)
        {
            //check the order
            if (lexerResult.Index != index)
            {
                throw new ParseIndexException(index, lexerResult.Index,
                                              "Index Error: Index should be " + index + " instead of " + lexerResult.Index);
            }
            //transfer to parse unit
            ParseUnit parseUnit = new ParseUnit(lexerResult.Name, null, lexerResult.Position, lexerResult.Value, null);

            //to parse it
            index++; // move to the next index for sync
            await JoinParse(parseUnit, lexerResult.Index);
        }
Example #14
        public void Parser_Place_InsufficientParameters_ThrowsException()
        {
            // Arrange
            var lexerResult = new LexerResult
            {
                CommandToken    = "PLACE",
                ParameterTokens = new[]
                {
                    "FOO"
                }
            };
            var textWriter = new StringWriter();

            // Act & Assert
            Assert.Throws <Exception>(() => CommandInterpreter.Parser(lexerResult, textWriter));
        }
Example #15
        /// <summary>
        /// Receives a LexerResult and parses it synchronously; the order of Index is checked.
        /// </summary>
        /// <param name="lexerResult">The Result of Lexer</param>
        public void ParseLexUnit(LexerResult lexerResult)
        {
            //check the order
            if (lexerResult.Index != index)
            {
                throw new ParseIndexException(index, lexerResult.Index,
                                              "Index Error: Index should be " + index + " instead of " + lexerResult.Index);
            }
            //transfer to parse unit
            ParseUnit parseUnit = new ParseUnit(lexerResult.Name, null, lexerResult.Position, lexerResult.Value, null);

            //to parse it
            index++;      // move to the next index for sync
            door.GetIn(); // advance for async; not actually used here.
            Parse(parseUnit);
        }
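A hypothetical driver for the two ParseLexUnit overloads above (names outside the shown API are illustrative): lexer results must arrive with consecutive Index values, otherwise a ParseIndexException is thrown.

        private void FeedParser(IEnumerable <LexerResult> lexerOutput)
        {
            foreach (LexerResult unit in lexerOutput)
            {
                // Throws ParseIndexException when unit.Index does not match the expected index.
                ParseLexUnit(unit);
            }
        }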
Example #16
        public void Parser_Move_CaseInsensitive_OK()
        {
            // Arrange
            var lexerResult = new LexerResult
            {
                CommandToken    = "mOVe",
                ParameterTokens = new string[0]
            };
            var textWriter = new StringWriter();

            // Act
            var command = CommandInterpreter.Parser(lexerResult, textWriter);

            // Assert
            Assert.NotNull(command);
            Assert.IsType <MoveCommand>(command);
        }
Example #17
        public void Parser_Right_OK()
        {
            // Arrange
            var lexerResult = new LexerResult
            {
                CommandToken    = "RIGHT",
                ParameterTokens = new string[0]
            };
            var textWriter = new StringWriter();

            // Act
            var command = CommandInterpreter.Parser(lexerResult, textWriter);

            // Assert
            Assert.NotNull(command);
            Assert.IsType <RightCommand>(command);
        }
Example #18
        public void Parser_Report_CaseInsensitive_OK()
        {
            // Arrange
            var lexerResult = new LexerResult
            {
                CommandToken    = "rEPORt",
                ParameterTokens = new string[0]
            };
            var textWriter = new StringWriter();

            // Act
            var command = CommandInterpreter.Parser(lexerResult, textWriter);

            // Assert
            Assert.NotNull(command);
            Assert.IsType <ReportCommand>(command);
            Assert.Same(textWriter, ((ReportCommand)command).TextWriter);
        }
Example #19
        public void Parser_Place_SecondParameterWrongType_ThrowsException()
        {
            // Arrange
            var lexerResult = new LexerResult
            {
                CommandToken    = "PLACE",
                ParameterTokens = new[]
                {
                    "1",
                    "FOO",
                    "EAST"
                }
            };
            var textWriter = new StringWriter();

            // Act & Assert
            Assert.Throws <Exception>(() => CommandInterpreter.Parser(lexerResult, textWriter));
        }
Example #20
        static void Main(string[] args)
        {
            string input;

            do
            {
                try
                {
                    Console.Clear();
                    Console.Write("Zadajte vzorec:");
                    input = Console.ReadLine();
                    ILexer        lexer        = Setup.Instance.GetLexer();
                    ILexerContext lexerContext = Setup.Instance.CreateLexerContext(input);
                    LexerResult   lexerResult  = lexer.Evaluate(lexerContext);

                    IParser        parser        = Setup.Instance.GetParser();
                    IParserContext parserContext = Setup.Instance.CreateParserContext(lexerResult.PostfixNotation);
                    ParserResult   parserResult  = parser.Evaluate(parserContext);

                    Visitor evaluator      = new Evaluator(Setup.Instance.CreateEvaluationContext());
                    Visitor differentiator = new Differentiator(SyntaxToken.Variable("x"));

                    Console.WriteLine($"Vstup: {input}");
                    Console.WriteLine($"Výsledok: {evaluator.Visit(parserResult.Tree)}");
                    Console.WriteLine($"Derivácia: {differentiator.Visit(parserResult.Tree)}");
                }
                catch (Exception ex)
                {
                    Console.Clear();
                    Console.WriteLine("Vyskytla sa chyba!");
                    Console.WriteLine("Ohláste to prosím Maťovi.");
                    Console.WriteLine();
                    Console.WriteLine("Za pochopenie ďakujeme!");
                    Console.WriteLine("Váš kalkulátor");
                    Console.WriteLine();
                    Console.WriteLine();
                    Console.WriteLine("Chybová správa");
                    Console.WriteLine(ex.Message);
                }
                Console.ReadLine();
            } while (true);
        }
Example #21
        internal UserFunction[] GetFuncsFromCode(string code, ref InterpreterState state)
        {
            // Convert code to tokens
            LexerResult <TokenType> result = lexer.Tokenize(code);

            // Error out if lexer fails
            if (result.IsError)
            {
                throw new InterpreterException($"Parsing Error: {result.Error}");
            }

            Queue <Token <TokenType> > tokens = new Queue <Token <TokenType> >(result.Tokens);

            RootNode node = BuildTree <RootNode>(tokens, ref state, true);
            List <FunctionDefinitionNode> funcs = new List <FunctionDefinitionNode>();

            GetFuncsFromTree(node, ref funcs);

            return(funcs.Select(f => new UserFunction(f.Name, f.Args, f.Children)).ToArray());
        }
Example #22
        public void Parser_Report_ExtraParameters_OK()
        {
            // Arrange
            var lexerResult = new LexerResult
            {
                CommandToken    = "REPORT",
                ParameterTokens = new[]
                {
                    "FOO"
                }
            };
            var textWriter = new StringWriter();

            // Act
            var command = CommandInterpreter.Parser(lexerResult, textWriter);

            // Assert
            Assert.NotNull(command);
            Assert.IsType <ReportCommand>(command);
        }
Example #23
        // Deal with the different results and control the lex groups.
        public bool OnLexed(LexerFramework sender, LexerResult e)
        {
            switch (sender.CurrentLexGroup)
            {
            case 0:
                if (e.Name == "Annotation")
                {
                    if ((string)e.Value == "//")
                    {
                        sender.CurrentLexGroup = 1;                             // single line annotation
                    }
                    if ((string)e.Value == "/*")
                    {
                        sender.CurrentLexGroup = 2;                             // multi-line annotation
                    }
                    return(false);
                }
                else
                {
                    // save the result
                    Lexeresults.Add(new KeyValuePair <string, object>(e.Name, e.Value));
                    return(true);
                }

            case 1:
                // single line annotation back to normal
                sender.CurrentLexGroup = 0;    // back to normal group.
                return(false);

            case 2:
                // multiple line annotation back to normal
                if (e.Name == "Annotation")
                {
                    sender.CurrentLexGroup = 0;                            // back to normal group.
                }
                return(false);

            default:
                return(false);
            }
        }
Example #24
        // Find the leftmost symbol for a sub-expression.
        // **No special handling for different symbol types; let the parser try some n-best alternatives.
        public List <LexerResult> Start(LBT tree, int k)
        {
            // Note: will require modification to accommodate indexed symbols (summations, etc.)
            List <LexerResult> all_results = new List <LexerResult>();

            foreach (LBT.LBTNode node in tree.root.children)
            {
                //Console.WriteLine("START Considering: {0}", node.stroke);
                Stroke snc = node.stroke;


                // **REMOVING**
                //List< Stroke > strokes_to_segment = new List< Stroke >();
                //strokes_to_segment.Add( snc );
                //foreach ( Stroke s in tree.KNearestNeighbors( snc, k, LBT.DefaultKNNFilter ) )
                //	strokes_to_segment.Add( s );

                // Construct a list of candidate segments, and classify each.
                //List< Segment > candidateSegments = CreateSegmentsForStrokes( tree, strokes_to_segment );
                List <Segment> candidateSegments = new List <Segment>();
                candidateSegments.Add(strokeSegmentDictionary[snc.stroke_id]);
                foreach (Segment seg in candidateSegments)
                {
                    LexerResult result = new LexerResult();
                    classifier.classify(seg);
                    //Console.WriteLine(seg); // DEBUG
                    seg.PopulateBB();
                    if (seg.classification == null)
                    {
                        Console.Error.WriteLine("LexerResult.Start:: NO CLASSIFICATION RESULT");
                        continue;
                    }

                    result.segment = seg;
                    result.lbt     = UpdateLBT(seg, tree);
                    all_results.Add(result);
                }
            }

            return(all_results);
        }
Example #25
        public void fields()
        {
            var tokens = new[]
            {
                new LexerToken <int>(0, 0, 0),
                new LexerToken <int>(0, 0, 2),
                new LexerToken <int>(0, 2, 0)
            };

            var newlines = new[] { 1, 5 };

            var r = new LexerResult <int>(
                buffer: "foobar",
                tokens: tokens,
                newlines: newlines,
                hasInvalidTokens: true);

            Assert.IsTrue(r.HasInvalidTokens);
            Assert.AreEqual("foobar", r.Buffer);
            Assert.AreSame(tokens, r.Tokens);
            Assert.AreSame(newlines, r.Newlines);
            Assert.AreEqual(3, r.Count);
        }
Example #26
        public void get_string()
        {
            var tokens = new[]
            {
                new LexerToken <int>(0, 0, 3),
                new LexerToken <int>(0, 4, 1),
                new LexerToken <int>(0, 5, 0),
                new LexerToken <int>(0, 8, 3),
                new LexerToken <int>(0, 11, 0)
            };

            var r = new LexerResult <int>(
                buffer: "foo +\n  bar",
                tokens: tokens,
                newlines: new[] { 5 },
                hasInvalidTokens: true);

            Assert.AreEqual("foo", r.GetString(0));
            Assert.AreEqual("+", r.GetString(1));
            Assert.AreEqual("", r.GetString(2));
            Assert.AreEqual("bar", r.GetString(3));
            Assert.AreEqual("", r.GetString(4));
        }
Example #27
 public List <RegionResult> Parse(string code)
 {
     _lexer   = new ScriptLexer(code);
     _current = _lexer.GetToken();
     return(GetRegionBlocks());
 }
 private static string ToTokens <T>(LexerResult <T> result) where T : struct
 {
     return(result.Tokens.Aggregate(new StringBuilder(), (buf, token) => buf.Append(token.TokenID)).ToString());
 }
Example #29
 private static string ToTokens2 <T>(LexerResult <T> result) where T : struct
 {
     return(string.Join("", result.Tokens.Select(x => $"{x.TokenID}[{x.Value}]")));
 }
Example #30
        /// <summary>
        /// Contextual parse of tokens up to a given position.
        /// </summary>
        /// <param name="tokens">Result of parsing.</param>
        /// <param name="ignoreLastToken">If true, we don't consume the last token.</param>
        /// <remarks>
        /// Not consuming the last token is useful when we still need the token's content
        /// but are more interested in the parsing context just before it.
        /// </remarks>
        public ContextResult Compute(LexerResult <TTok> tokens, bool ignoreLastToken)
        {
            var contextStack = new Stack <int>();

            var onError      = false;
            var readIndex    = 0;
            var token        = tokens.Tokens[0];
            var currentState = _initialState;
            var maxToken     = Math.Max(0, tokens.Count + (ignoreLastToken ? -1 : 0));

            while (true)
            {
                var action = ActionOf(currentState, (int)(object)token.Token);

                if (IsShiftAction(action))
                {
                    contextStack.Push(currentState);
                    currentState = action;

                    if (readIndex + 1 >= maxToken)
                    {
                        break;
                    }

                    token = tokens.Tokens[++readIndex];
                }
                else if (IsReduceAction(action))
                {
                    var rule     = -action;
                    var popCount = Rules.Rules[rule - Rules.TokenNames.Count].Steps.Count;

                    while (popCount-- > 1)
                    {
                        contextStack.Pop();
                    }

                    currentState = ActionOf(contextStack.Peek(), rule);
                }
                else // error
                {
                    onError = true;
                    readIndex--; /* we didn't consume the token in the end */
                    break;
                }
            }

            var baseIndex = BaseIndexOf(currentState);

            var tokSet = new HashSet <TTok>(Enumerable
                                            .Range(0, Rules.TokenNames.Count)
                                            .Where(i => IsShiftAction(_actions[baseIndex + i]))
                                            .Select(i => (TTok)(object)i));

            // Minimize the tokens displayed by using the token namer's
            // folding information.
            var minimalTokens = tokSet
                                .Where(t => !_namer.IsFolded(t, tokSet))
                                .Select(t => new ShiftContext(t, _stateContexts[_actions[baseIndex + (int)(object)t]]))
                                .ToArray();

            return(new ContextResult(
                       context: contextStack
                       .Where(st => _stateContexts[st].Count > 0)
                       .Select(st => new ParseFrame(_stateContexts[st]))
                       .ToArray(),
                       comeAfter: minimalTokens,
                       lastToken: readIndex >= 0 ? tokens.Tokens[readIndex].Token : default(TTok),
                       lastPosition: readIndex,
                       isOnError: onError));
        }
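A rough usage sketch for Compute (hypothetical: `lexer` and `contextComputer` stand in for whatever instances the host application holds, and the lexer is assumed to yield a LexerResult <TTok> compatible with this class):

            // Tokenize a partial input, then ask which tokens may legally come next at the caret.
            var lexed   = lexer.Tokenize("let x = ");
            var context = contextComputer.Compute(lexed, ignoreLastToken: true);
            // `context` carries the parse frames, the tokens that may follow, the last consumed
            // token and whether parsing stopped on an error (see the ContextResult constructor above).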