Example #1
        public async Task TokenTestAsync()
        {
            var tokenFilter         = new TokenFilter();
            IApiActionFilter filter = tokenFilter;

            await filter.OnBeginRequestAsync(null);

            Assert.True(tokenFilter.IsRequestTokenResult && tokenFilter.IsRequestRefreshToken == false && tokenFilter.TokenResult.IdToken == "0");
            await filter.OnEndRequestAsync(null);

            tokenFilter.IsRequestTokenResult  = false;
            tokenFilter.IsRequestRefreshToken = false;

            await Task.Delay(500);

            await filter.OnBeginRequestAsync(null);

            Assert.True(tokenFilter.IsRequestTokenResult == false && tokenFilter.IsRequestRefreshToken == false && tokenFilter.TokenResult.IdToken == "0");
            await filter.OnEndRequestAsync(null);

            tokenFilter.IsRequestTokenResult  = false;
            tokenFilter.IsRequestRefreshToken = false;


            await Task.Delay(1100);

            await filter.OnBeginRequestAsync(null);

            Assert.True(tokenFilter.IsRequestTokenResult == false && tokenFilter.IsRequestRefreshToken && tokenFilter.TokenResult.IdToken == "1");
            await filter.OnEndRequestAsync(null);
        }
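For context, here is a minimal sketch of what the TokenFilter under test might look like. The interface shape, the TokenResult type, and the one-second token lifetime are assumptions inferred from the delays and assertions above, not the real implementation:

        using System;
        using System.Threading.Tasks;

        // Hypothetical stand-ins; the real project defines these types.
        public interface IApiActionFilter
        {
            Task OnBeginRequestAsync(object context);
            Task OnEndRequestAsync(object context);
        }

        public class TokenResult
        {
            public string IdToken { get; set; }
        }

        public class TokenFilter : IApiActionFilter
        {
            public bool        IsRequestTokenResult  { get; set; }
            public bool        IsRequestRefreshToken { get; set; }
            public TokenResult TokenResult           { get; private set; }

            private DateTime expireAt;
            private int      version;

            public Task OnBeginRequestAsync(object context)
            {
                if (this.TokenResult == null)
                {
                    // First request: fetch a brand-new token ("0").
                    this.IsRequestTokenResult = true;
                    this.IssueToken();
                }
                else if (DateTime.UtcNow >= this.expireAt)
                {
                    // Token expired: refresh it, bumping the id ("1", "2", ...).
                    this.IsRequestRefreshToken = true;
                    this.version++;
                    this.IssueToken();
                }
                return Task.CompletedTask;
            }

            public Task OnEndRequestAsync(object context) => Task.CompletedTask;

            private void IssueToken()
            {
                this.TokenResult = new TokenResult { IdToken = this.version.ToString() };
                this.expireAt    = DateTime.UtcNow.AddSeconds(1); // assumed 1s lifetime
            }
        }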
Example #2
        public void NGramTokenFilterRoundtrips(string odataType)
        {
            string jsonContent = $@"{{
    ""@odata.type"": ""{odataType}"",
    ""name"": ""test"",
    ""minGram"": 0,
    ""maxGram"": 1
}}";

            JsonDocument     jsonDoc = JsonDocument.Parse(jsonContent);
            NGramTokenFilter sut     = TokenFilter.DeserializeTokenFilter(jsonDoc.RootElement) as NGramTokenFilter;

            Assert.NotNull(sut);
            Assert.AreEqual(odataType, sut.ODataType);
            Assert.AreEqual("test", sut.Name);
            Assert.AreEqual(0, sut.MinGram);
            Assert.AreEqual(1, sut.MaxGram);

            using MemoryStream stream = new MemoryStream();
            using (Utf8JsonWriter writer = new Utf8JsonWriter(stream))
            {
                ((IUtf8JsonSerializable)sut).Write(writer);
            }

            stream.Position = 0;

            jsonDoc = JsonDocument.Parse(stream);
            Assert.True(jsonDoc.RootElement.TryGetProperty("@odata.type", out JsonElement odataTypeElem));
            Assert.AreEqual(odataType, odataTypeElem.GetString());
        }
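The roundtrip test above takes the OData type as a parameter so the same assertions can cover both generations of the filter. A hypothetical NUnit parameterization (the attribute style and the two type names are assumptions, not confirmed from the source):

        [TestCase("#Microsoft.Azure.Search.NGramTokenFilter")]
        [TestCase("#Microsoft.Azure.Search.NGramTokenFilterV2")]
        public void NGramTokenFilterRoundtrips(string odataType)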
Example #3
        public void ThrowOnUnrecognizedDirective()
        {
            Lexer       lexer  = new Lexer("{$FOO}", "");
            TokenFilter filter = new TokenFilter(lexer.Tokens, _defines, _fileLoader);

            // Enumerating the filtered tokens forces the filter to evaluate the
            // unrecognized {$FOO} directive; the resulting LexException is what
            // this test is after (Example #5 makes the assertion explicit).
            new List<Token>(filter.Tokens);
        }
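The _defines and _fileLoader fields are fixtures of the surrounding test class (they reappear in Examples #5 and #6); hypothetical declarations, with both types assumed:

            // Hypothetical fixture fields assumed by these tests.
            private readonly Dictionary<string, string> _defines    = new Dictionary<string, string>();
            private readonly IFileLoader                _fileLoader = new MemoryFileLoader();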
Example #4
 // Restores cached social-network auth tokens (Twitter, LinkedIn, Facebook)
 // for the current session, fetching any that are missing from the cache.
 protected void getFilterItems()
 {
     _tokenFilter = sessionData.getDataItem("TokenFilter") as TokenFilter;
     if (_tokenFilter == null)
     {
         _tokenFilter = new TokenFilter();
         sessionData.addDataItem("TokenFilter", _tokenFilter);
     }
     _linkedInCacheKey = _tokenFilter.linkedInCacheKey;
     _facebookCacheKey = _tokenFilter.facebookCacheKey;
     _twitterCacheKey  = _tokenFilter.twitterCacheKey;
     if (_twitterCacheKey != Constants.InvalidIdString && (_tokenFilter.twitterAuthToken == Constants.InvalidIdString || _tokenFilter.twitterAuthToken == null))
     {
         _tokenFilter.twitterAuthToken = CacheManager.getItemFromCache(_tokenFilter.twitterCacheKey, club.clubIdString) as string;
         _tokenFilter.twitterSecret    = CacheManager.getItemFromCache(_tokenFilter.twitterSecretKey, club.clubIdString) as string;
     }
     if (_linkedInCacheKey != Constants.InvalidIdString && (_tokenFilter.linkedinToken == Constants.InvalidIdString || _tokenFilter.linkedinToken == null))
     {
         _tokenFilter.linkedinToken = CacheManager.getItemFromCache(_tokenFilter.linkedInCacheKey, club.clubIdString) as string;
     }
     if (_facebookCacheKey != Constants.InvalidIdString && (_tokenFilter.facebookToken == Constants.InvalidIdString || _tokenFilter.facebookToken == null))
     {
         _tokenFilter.facebookToken = CacheManager.getItemFromCache(_tokenFilter.facebookCacheKey, club.clubIdString) as string;
     }
     _twitterToken  = _tokenFilter.twitterAuthToken;
     _linkedInToken = _tokenFilter.linkedinToken;
     _facebookToken = _tokenFilter.facebookToken;
 }
Example #5
        public void ThrowOnUnrecognizedDirective()
        {
            Lexer       lexer  = new Lexer("{$FOO}", "");
            TokenFilter filter = new TokenFilter(lexer.Tokens, _defines, _fileLoader);

            Assert.Throws<LexException>(() => new List<Token>(filter.Tokens));
        }
Example #6
 private Constraint LexesAndFiltersAs(params string[] expected)
 {
     return new LexesAsConstraint(expected, delegate(IEnumerable<Token> tokens)
     {
         TokenFilter filter = new TokenFilter(tokens, _defines, _fileLoader);
         return filter.Tokens;
     });
 }
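A hypothetical call site for this helper; the directive text, the expected token, and the NUnit-style assertion are illustrative:

 [Test]
 public void FiltersOutInactiveBranch()
 {
     // Assumes {$IFDEF}/{$ENDIF} handling: the inactive branch is dropped,
     // so the filtered stream reduces to the single identifier "kept".
     Assert.That("{$IFDEF UNDEFINED}skipped{$ENDIF}kept", LexesAndFiltersAs("kept"));
 }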
Example #7
        private MofFileParser(string mofFilePath)
        {
            this.m_filePath = mofFilePath;
            string str = File.ReadAllText(mofFilePath);

            this.m_filteredTokens = new TokenFilter(new Tokenizer.Tokenizer(str, mofFilePath), new Predicate<Token>(MofFileParser.IgnoreWhitespaceFilter));
            this.m_queue          = new Queue<Token>(this.m_filteredTokens);
        }
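The whitespace predicate itself is not shown; a plausible sketch (the WhitespaceToken type test is an assumption about the tokenizer's API):

        // Keep every token that is not whitespace, so the parser's queue only
        // ever contains significant tokens.
        private static bool IgnoreWhitespaceFilter(Token token)
        {
            return !(token is WhitespaceToken);
        }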
Example #8
        // Lucene.NET 3.x counterpart of Example #10: drains the filter and
        // collects each term via the older ITermAttribute API.
        private List<string> Filtre(TokenFilter filter)
        {
            List<string>   tas     = new List<string>();
            ITermAttribute termAtt = filter.GetAttribute<ITermAttribute>();

            while (filter.IncrementToken())
            {
                tas.Add(termAtt.Term);
            }
            return tas;
        }
Example #9
            private TokenFilterSpec NewFilterChain(Random random, Tokenizer tokenizer, bool offsetsAreCorrect)
            {
                TokenFilterSpec spec = new TokenFilterSpec();

                spec.offsetsAreCorrect = offsetsAreCorrect;
                spec.stream            = tokenizer;
                StringBuilder descr      = new StringBuilder();
                int           numFilters = random.Next(5);

                for (int i = 0; i < numFilters; i++)
                {
                    // Insert ValidatingTF after each stage so we can
                    // catch problems right after the TF that "caused"
                    // them:
                    spec.stream = new ValidatingTokenFilter(spec.stream, "stage " + i, spec.offsetsAreCorrect);

                    while (true)
                    {
                        ConstructorInfo ctor = tokenfilters[random.Next(tokenfilters.Count)];

                        // Hack: MockGraph/MockLookahead have assertions that will trip if they
                        // follow an offsets violator, so we can't use them after e.g. WikipediaTokenizer.
                        if (!spec.offsetsAreCorrect &&
                            (ctor.DeclaringType.Equals(typeof(MockGraphTokenFilter)) ||
                             ctor.DeclaringType.Equals(typeof(MockRandomLookaheadTokenFilter))))
                        {
                            continue;
                        }

                        object[] args = NewFilterArgs(random, spec.stream, ctor.GetParameters().Select(p => p.ParameterType).ToArray());
                        if (Broken(ctor, args))
                        {
                            continue;
                        }
                        TokenFilter flt = CreateComponent <TokenFilter>(ctor, args, descr);
                        if (flt != null)
                        {
                            spec.offsetsAreCorrect &= !BrokenOffsets(ctor, args);
                            spec.stream             = flt;
                            break;
                        }
                    }
                }

                // Insert ValidatingTF after each stage so we can
                // catch problems right after the TF that "caused"
                // them:
                spec.stream = new ValidatingTokenFilter(spec.stream, "last stage", spec.offsetsAreCorrect);

                spec.toString = descr.ToString();
                return(spec);
            }
Example #10
 private IList<string> Filter(TokenFilter filter)
 {
     IList<string> tas = new List<string>();
     ICharTermAttribute termAtt = filter.GetAttribute<ICharTermAttribute>();
     filter.Reset();
     while (filter.IncrementToken())
     {
         tas.Add(termAtt.ToString());
     }
     filter.End();
     filter.Dispose();
     return tas;
 }
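A sketch of driving this helper in a Lucene.NET 4.8-style test; the tokenizer/filter pair is illustrative:

 // Illustrative: collect the lowercased terms of a whitespace-tokenized string.
 // Assumes Lucene.Net.Analysis.Core, Lucene.Net.Util, and System.IO are imported.
 TextReader reader = new StringReader("Hello World");
 var tokenizer  = new WhitespaceTokenizer(LuceneVersion.LUCENE_48, reader);
 var lowercased = new LowerCaseFilter(LuceneVersion.LUCENE_48, tokenizer);
 IList<string> terms = Filter(lowercased);   // ["hello", "world"]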
Example #11
 // Ported from Java: private java.util.List<String> filter(TokenFilter filter) throws java.io.IOException
 private IList<string> filter(TokenFilter filter)
 {
     IList<string> tas = new List<string>();
     ICharTermAttribute termAtt = filter.GetAttribute<ICharTermAttribute>();
     filter.Reset();
     while (filter.IncrementToken())
     {
         tas.Add(termAtt.ToString());
     }
     filter.End();
     filter.Dispose();
     return tas;
 }
Example #14
 public NodeTokensFormatRule(NodeType applyToNodeType, TokenFilter filter)
 {
     ApplyToNodeType = applyToNodeType;
     Filter          = filter;
 }
Example #15
 private MofFileParser(IEnumerable<char> inputStream, string documentPath)
 {
     this.m_filePath       = documentPath;
     this.m_filteredTokens = new TokenFilter(new Tokenizer.Tokenizer(inputStream, documentPath), new Predicate<Token>(MofFileParser.IgnoreWhitespaceFilter));
     this.m_queue          = new Queue<Token>(this.m_filteredTokens);
 }