Code Example #1
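Setup code that creates a fresh Tokenizer, registers the two overlapping patterns "_" and "__", and builds it; judging by the method name, this runs before each test.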
public void SetUp()
{
    tokenizer = new Tokenizer();
    tokenizer.Add("_");
    tokenizer.Add("__");
    tokenizer.Build();
}
Code Example #2
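Verifies that adding the same TokenDefinition instance twice throws an ArgumentException.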
[Fact]
public void Add_DuplicateTokenDefinition_ThrowsArgumentException()
{
    var tokenizer = new Tokenizer<TokenTypeTest>(TokenTypeTest.Invalid, TokenTypeTest.Eol, TokenTypeTest.Eof, false);

    var td = new TokenDefinition<TokenTypeTest>(TokenTypeTest.D, "");

    tokenizer.Add(td);

    Assert.Throws<ArgumentException>(() => tokenizer.Add(td));
}
Code Example #3
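Verifies match ordering when several definitions match the same input: per this test, the definition with the lower precedence value wins, so "helloworld" (precedence 1) is preferred over "hellowo" (precedence 2).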
[Fact]
public void Tokenize_MultipleMatches_ExpectsOrderedByPrecedence()
{
    var tokenizer = new Tokenizer<TokenTypeTest>(TokenTypeTest.Invalid, TokenTypeTest.Eol, TokenTypeTest.Eof, false);

    tokenizer.Add(new TokenDefinition<TokenTypeTest>(TokenTypeTest.E, @"hellowo", precedence: 2));
    tokenizer.Add(new TokenDefinition<TokenTypeTest>(TokenTypeTest.D, @"helloworld", precedence: 1));

    var matches = tokenizer.Tokenize("helloworld", 1).ToList();

    Assert.Equal(TokenTypeTest.D, matches[0].TokenType);
}
Code Example #4
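Verifies that input with no matching definition at the start is emitted as a token of the configured Invalid type carrying the unmatched text ("abc").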
[Fact]
public void Tokenize_InvalidInputStart_ExpectsInvalidType()
{
    var tokenizer = new Tokenizer<TokenTypeTest>(TokenTypeTest.Invalid, TokenTypeTest.Eol, TokenTypeTest.Eof, false);

    tokenizer.Add(new TokenDefinition<TokenTypeTest>(TokenTypeTest.E, @"e"));
    tokenizer.Add(new TokenDefinition<TokenTypeTest>(TokenTypeTest.D, @"d"));

    var matches = tokenizer.Tokenize("abced", 1).ToList();

    var token = matches[0];

    Assert.Equal(TokenTypeTest.Invalid, token.TokenType);
    Assert.Equal("abc", token.Value);
}
Code Example #5
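Verifies that FindAllTokenMatches returns every raw match in the input; per the assertions, matches are grouped by definition (all S-matches before all E-matches) rather than ordered by position.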
[Fact]
public void FindAllTokenMatches_MultipleMatches_ExpectedCountAndValue()
{
    var tokenizer = new Tokenizer<TokenTypeTest>(TokenTypeTest.Invalid, TokenTypeTest.Eol, TokenTypeTest.Eof, false);

    tokenizer.Add(new TokenDefinition<TokenTypeTest>(TokenTypeTest.D, @"S\d\d"));
    tokenizer.Add(new TokenDefinition<TokenTypeTest>(TokenTypeTest.E, @"E\d\d"));

    var matches = tokenizer.FindAllTokenMatches("MyVideoS23E13OtherE12S11", 1).ToList();

    Assert.Equal(4, matches.Count);

    Assert.Equal("S23", matches[0].Value);
    Assert.Equal("S11", matches[1].Value);
    Assert.Equal("E13", matches[2].Value);
    Assert.Equal("E12", matches[3].Value);
}
Code Example #6
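Verifies that after a single Add call, the TokenDefinitions collection contains exactly one definition.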
[Fact]
public void Add_Count_ExpectsOne()
{
    var tokenizer = new Tokenizer<TokenTypeTest>(TokenTypeTest.Invalid, TokenTypeTest.Eol, TokenTypeTest.Eof, false);

    tokenizer.Add(new TokenDefinition<TokenTypeTest>(TokenTypeTest.D, ""));

    Assert.Single(tokenizer.TokenDefinitions);
}
Code Example #7
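Verifies that Clear empties the TokenDefinitions collection.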
[Fact]
public void Clear_AddTokenDef_ExpectsZero()
{
    var tokenizer = new Tokenizer<TokenTypeTest>(TokenTypeTest.Invalid, TokenTypeTest.Eol, TokenTypeTest.Eof, false);

    tokenizer.Add(new TokenDefinition<TokenTypeTest>(TokenTypeTest.D, @"S\d\d"));

    tokenizer.Clear();

    Assert.Empty(tokenizer.TokenDefinitions);
}
Code Example #8
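Verifies that the token stream produced by Tokenize always ends with an Eol token.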
[Fact]
public void Tokenize_AnyInputString_EndsWithEolToken()
{
    var tokenizer = new Tokenizer<TokenTypeTest>(TokenTypeTest.Invalid, TokenTypeTest.Eol, TokenTypeTest.Eof, false);

    tokenizer.Add(new TokenDefinition<TokenTypeTest>(TokenTypeTest.D, @"S\d\d"));

    var matches   = tokenizer.Tokenize("MyVideoS23E13OtherE12S11", 1).ToList();
    var lastToken = matches.Last();

    Assert.Equal(TokenTypeTest.Eol, lastToken.TokenType);
}
Code Example #9
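Verifies that tokenizing multiple input lines assigns the correct LineNumber to each token: the first token comes from line 1, the second-to-last from line 2.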
[Fact]
public void TokenizeEnumerable_MultipleLines_ExpectsValidLineNumbers()
{
    var tokenizer = new Tokenizer<TokenTypeTest>(TokenTypeTest.Invalid, TokenTypeTest.Eol, TokenTypeTest.Eof, false);

    tokenizer.Add(new TokenDefinition<TokenTypeTest>(TokenTypeTest.E, @"a"));
    tokenizer.Add(new TokenDefinition<TokenTypeTest>(TokenTypeTest.D, @"b"));
    tokenizer.Add(new TokenDefinition<TokenTypeTest>(TokenTypeTest.D, @"c"));

    var matches = tokenizer.Tokenize(new string[] {
        "b a",
        "c"
    }).ToList();

    var token = matches[0];

    Assert.Equal(1, token.LineNumber);

    token = matches[matches.Count - 2];
    Assert.Equal(2, token.LineNumber);
}
Code Example #10
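Verifies that Remove deletes a previously added definition from TokenDefinitions.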
[Fact]
public void Remove_ExistingTokenDefinition_ExpectsDecrease()
{
    var tokenizer = new Tokenizer<TokenTypeTest>(TokenTypeTest.Invalid, TokenTypeTest.Eol, TokenTypeTest.Eof, false);

    var td = new TokenDefinition<TokenTypeTest>(TokenTypeTest.D, "");

    tokenizer.Add(td);
    tokenizer.Remove(td);

    Assert.DoesNotContain(td, tokenizer.TokenDefinitions);
}
Code Example #11
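Verifies that removing a definition that was never added leaves the TokenDefinitions collection unchanged.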
[Fact]
public void Remove_NonExistingTokenDefinition_ExpectsNoChange()
{
    var tokenizer = new Tokenizer<TokenTypeTest>(TokenTypeTest.Invalid, TokenTypeTest.Eol, TokenTypeTest.Eof, false);

    var td1 = new TokenDefinition<TokenTypeTest>(TokenTypeTest.D, "");

    tokenizer.Add(td1);

    var td2 = new TokenDefinition<TokenTypeTest>(TokenTypeTest.E, "");

    tokenizer.Remove(td2);

    Assert.Single(tokenizer.TokenDefinitions);
}
Code Example #12
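Not a test: an initialization routine from a consumer of the tokenizer/parser pair. It declares two grammars inline, loads the token definitions and further grammars from FsuLanguageSpec, appends a pipe-or-end-of-line lookahead rule to every grammar triggered by a Function token, and registers all grammars with the Parser.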
protected virtual void Init()
{
    ISet<Grammar<TokenType, IProcessor>> grammars = new HashSet<Grammar<TokenType, IProcessor>> {
        new Grammar<TokenType, IProcessor>(TokenType.Pipe)
        {
            Rules = { new LookaheadRule<TokenType>(TokenType.Function, TokenType.TextValue, TokenType.StringValue) }
        },

        new Grammar<TokenType, IProcessor>(TokenType.StringValue, TokenType.TextValue)
        {
            IncludeTriggerToken = true,
            Construct           = x => new ItemsProcessor(x.Cast<string>()),
            Rules =
            {
                // "Seperator" spelling matches the source enum.
                new RepeatingSequenceRule<TokenType>(false, TokenType.Seperator)
                {
                    new Rule<TokenType>(TokenType.StringValue, TokenType.TextValue)
                },
                new LookaheadRule<TokenType>(TokenType.Pipe, TokenType.Eol)
            }
        },
    };

    Tokenizer.LoadTokenDefinitions();
    foreach (TokenDefinition<TokenType> def in FsuLanguageSpec.GetTokenDefinitions())
    {
        Tokenizer.Add(def);
    }

    IEnumerable<Grammar<TokenType, IProcessor>> allGrammars = grammars.Concat(FsuLanguageSpec.GetGrammers());

    // Ensure that every function is followed by either a pipe or the end of line.
    foreach (Grammar<TokenType, IProcessor> grammar in allGrammars.Where(x => x.TriggerTokens[0].Equals(TokenType.Function)))
    {
        grammar.Rules.Add(new LookaheadRule<TokenType>(TokenType.Pipe, TokenType.Eol));
    }

    Parser.Clear();
    foreach (Grammar<TokenType, IProcessor> grammar in allGrammars)
    {
        Parser.Add(grammar);
    }
}
Code Example #13
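Verifies that Add rejects a null TokenDefinition with an ArgumentNullException.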
[Fact]
public void Add_NullTokenDefinition_ThrowsArgumentNullException()
{
    var tokenizer = new Tokenizer<TokenTypeTest>(TokenTypeTest.Invalid, TokenTypeTest.Eol, TokenTypeTest.Eof, false);

    Assert.Throws<ArgumentNullException>(() => tokenizer.Add(null));
}