/// <summary>
/// Deserializes a potentially endless sequence of objects from a stream source
/// </summary>
/// <param name="input">a streamed source of objects</param>
/// <returns>a sequence of objects</returns>
/// <remarks>
/// character stream => token stream => object stream
/// </remarks>
public IEnumerable ReadMany(TextReader input)
{
    ITextTokenizer<T> tokenizer = this.GetTokenizer();
    if (tokenizer == null)
    {
        throw new ArgumentNullException("tokenizer");
    }

    ITokenAnalyzer<T> analyzer = this.GetAnalyzer();
    if (analyzer == null)
    {
        throw new ArgumentNullException("analyzer");
    }

    try
    {
        // character stream => token stream => object stream
        return analyzer.Analyze(tokenizer.GetTokens(input));
    }
    catch (DeserializationException)
    {
        throw;
    }
    catch (Exception ex)
    {
        throw new DeserializationException(ex.Message, tokenizer.Index, tokenizer.Line, tokenizer.Column, ex);
    }
}
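// Usage sketch (not from the original source): a minimal example of the streaming
// path above, assuming a concrete reader type named JsonReader that exposes this
// ReadMany overload; the type name, its parameterless constructor, and the file
// name are assumptions for illustration only.
using System;
using System.IO;

class ReadManyExample
{
    static void Main()
    {
        var reader = new JsonReader(); // assumed concrete reader exposing ReadMany
        using (TextReader input = File.OpenText("items.json"))
        {
            // Objects are produced lazily: each item is deserialized only as the
            // sequence is enumerated, so a very large or endless stream can be
            // consumed without buffering it in memory.
            foreach (object item in reader.ReadMany(input))
            {
                Console.WriteLine(item);
            }
        }
    }
}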
public IdentityService(
    IHttpClientFactory httpClientFactory,
    ITokenAnalyzer tokenAnalyzer,
    IAuthTokenStore authTokenStore)
{
    _httpClientFactory = httpClientFactory;
    _tokenAnalyzer = tokenAnalyzer;
    _authTokenStore = authTokenStore;
}
public AuthenticationSessionService(
    IHttpContextAccessor httpContextAccessor,
    ITokenAnalyzer tokenAnalyzer,
    IIdentityService identityService)
{
    _httpContextAccessor = httpContextAccessor;
    _tokenAnalyzer = tokenAnalyzer;
    _identityService = identityService;
}
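// Wiring sketch (not from the original source): one plausible ASP.NET Core
// registration for the two constructor-injected services above. The concrete
// TokenAnalyzer and AuthTokenStore types and the IAuthenticationSessionService
// interface name are assumptions; only the interfaces injected above and the two
// service classes appear in the snippets.
using Microsoft.Extensions.DependencyInjection;

public static class ServiceCollectionExtensions
{
    public static IServiceCollection AddIdentityServices(this IServiceCollection services)
    {
        services.AddHttpClient();           // provides IHttpClientFactory
        services.AddHttpContextAccessor();  // provides IHttpContextAccessor
        services.AddScoped<ITokenAnalyzer, TokenAnalyzer>();    // assumed implementation
        services.AddScoped<IAuthTokenStore, AuthTokenStore>();  // assumed implementation
        services.AddScoped<IIdentityService, IdentityService>();
        services.AddScoped<IAuthenticationSessionService, AuthenticationSessionService>(); // assumed interface name
        return services;
    }
}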
/// <summary>
/// Ctor
/// </summary>
/// <param name="analyzer"></param>
/// <param name="sequences"></param>
public QueryProvider(ITokenAnalyzer<ModelTokenType> analyzer, IQueryable<TokenSequence> sequences)
{
    if (analyzer == null)
    {
        throw new ArgumentNullException("analyzer");
    }
    if (sequences == null)
    {
        throw new ArgumentNullException("sequences");
    }

    this.Engine = new QueryEngine(analyzer, sequences);
}
/// <summary>
/// Ctor
/// </summary>
/// <param name="analyzer"></param>
/// <param name="input"></param>
public QueryEngine(ITokenAnalyzer<ModelTokenType> analyzer, IQueryable<TokenSequence> input)
{
    if (analyzer == null)
    {
        throw new ArgumentNullException("analyzer");
    }
    if (input == null)
    {
        throw new ArgumentNullException("input");
    }

    this.Analyzer = analyzer;
    this.Resolver = analyzer.Settings.Resolver;
    this.Source = input;
}
private object ReadSingle(ITextTokenizer<T> tokenizer, IEnumerable<Token<T>> tokens, Type targetType)
{
    ITokenAnalyzer<T> analyzer = this.GetAnalyzer();
    if (analyzer == null)
    {
        throw new ArgumentNullException("analyzer");
    }

    try
    {
        IEnumerator enumerator = analyzer.Analyze(tokens, targetType).GetEnumerator();
        if (!enumerator.MoveNext())
        {
            return null;
        }

        // character stream => token stream => object stream
        object value = enumerator.Current;

        // enforce only one object in stream
        if (!this.Settings.AllowTrailingContent && enumerator.MoveNext())
        {
            throw new DeserializationException("Invalid trailing content", tokenizer.Index, tokenizer.Line, tokenizer.Column);
        }

        return value;
    }
    catch (DeserializationException)
    {
        throw;
    }
    catch (Exception ex)
    {
        throw new DeserializationException(ex.Message, tokenizer.Index, tokenizer.Line, tokenizer.Column, ex);
    }
}
public TokenModel? AnalyzeToken(
    [Service] ITokenAnalyzer analyzer,
    string token)
{
    return analyzer.Analyze(token);
}
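// Wiring sketch (not from the original source): the [Service] attribute is the
// Hot Chocolate injection marker, so the resolver above is presumably exposed
// through a GraphQL query type. A minimal, assumed registration could look like
// this; the TokenQueries class, the concrete TokenAnalyzer, and the use of the
// minimal hosting model are assumptions for illustration only.
using Microsoft.Extensions.DependencyInjection;

var builder = WebApplication.CreateBuilder(args);

builder.Services.AddSingleton<ITokenAnalyzer, TokenAnalyzer>(); // assumed implementation

builder.Services
    .AddGraphQLServer()
    .AddQueryType<TokenQueries>(); // assumed query class containing AnalyzeToken

var app = builder.Build();
app.MapGraphQL(); // the field is then callable as analyzeToken(token: "...")
app.Run();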
/// <summary>
/// Ctor
/// </summary>
/// <param name="analyzer"></param>
/// <param name="sequences"></param>
internal Query(ITokenAnalyzer<ModelTokenType> analyzer, IEnumerable<TokenSequence> sequences)
    : base(new QueryProvider(analyzer, sequences.AsQueryable()))
{
    this.Analyzer = analyzer;
    this.Sequences = sequences;
}
/// <summary>
/// Ctor
/// </summary>
/// <param name="analyzer"></param>
/// <param name="sequence"></param>
internal Query(ITokenAnalyzer<ModelTokenType> analyzer, TokenSequence sequence)
    : this(analyzer, sequence.SplitValues())
{
}