/// <summary>
/// Lexes and parses <paramref name="src"/> with the Shell grammar, then evaluates
/// the resulting parse tree with the visitor.
/// </summary>
/// <param name="src">Shell source text.</param>
/// <param name="program">True to use the <c>program</c> start rule; false for a single <c>statement</c>.</param>
/// <returns>The value produced by visiting the parse tree.</returns>
public Shell.Types.IShellReturnable ResultOf(string src, bool program)
{
    logger.Verbose($"Source:\n{src.Trim()}");

    ICharStream charStream = CharStreams.fromstring(src);
    lexer = new ShellLexer(charStream);
    ITokenStream tokenStream = new CommonTokenStream(lexer);

    parser = new ShellParser(tokenStream)
    {
        BuildParseTree = true,
        ErrorHandler = new BailErrorStrategy()
    };
    parser.RemoveErrorListeners();
    parser.AddErrorListener(new SyntaxErrorListener());

    // Choose the grammar start rule based on the caller's request.
    IParseTree tree = program ? (IParseTree)parser.program() : parser.statement();

    logger.Debug($"Parse tree:\n{tree.ToStringTree(parser)}");
    visitor.tokenSource = lexer.InputStream;
    return visitor.Visit(tree);
}
/// <summary>
/// Extracts expressions from the token stream and calls the corresponding handler.
/// </summary>
/// <param name="tokenSource">The source of SQL tokens.</param>
/// <returns>The results of the parse.</returns>
protected MatchResult GetResult(ITokenSource tokenSource)
{
    // Parse starting from the grammar's designated start expression.
    var sqlParser = new Parser(grammar);
    return sqlParser.Parse(SqlGrammar.Start.Name, tokenSource);
}
/// <summary>
/// Polls the service controller until it reaches <paramref name="status"/>,
/// or the caller's token / the one-minute safety timeout fires.
/// </summary>
/// <param name="serviceController">The service to observe; must not be null.</param>
/// <param name="status">The status to wait for.</param>
/// <param name="token">Optional cancellation token.</param>
/// <returns>True when the status was reached; false on cancellation or timeout.</returns>
private static async Task <bool> WaitForAsync(
    [NotNull] ServiceController serviceController,
    ServiceControllerStatus status,
    CancellationToken token = default(CancellationToken))
{
    if (serviceController == null)
    {
        // nameof keeps the parameter name refactor-safe.
        throw new ArgumentNullException(nameof(serviceController));
    }

    // TODO Remove constant timeout
    using (ITokenSource tokenSource = token.WithTimeout(TimeSpan.FromMinutes(1)))
    {
        token = tokenSource.Token;
        serviceController.Refresh();
        while (serviceController.Status != status)
        {
            try
            {
                // ReSharper disable once PossibleNullReferenceException
                await Task.Delay(250, token).ConfigureAwait(false);
            }
            catch (OperationCanceledException)
            {
                // Task.Delay throws when the token fires; report failure instead
                // of leaking the exception — this matches the intent of the
                // IsCancellationRequested check below, which otherwise never ran.
                return false;
            }

            if (token.IsCancellationRequested)
            {
                return false;
            }

            serviceController.Refresh();
        }
    }

    return true;
}
/// <summary>
/// Returns a token source for the given field, reusing <paramref name="existing"/>
/// when one is supplied and rewinding it to the start.
/// </summary>
public ITokenSource CreateTokenSource(string field, ITokenSource existing)
{
    ITokenSource source = existing;
    if (source == null)
    {
        source = new StringTokenizer();
    }
    // Always restart from position zero, even for a reused source.
    source.Position = 0;
    return source;
}
/// <summary>
/// Creates an unbuffered stream over <paramref name="tokenSource"/> with an
/// initial buffer of <paramref name="bufferSize"/> tokens, pre-fetching one token.
/// </summary>
public UnbufferedTokenStream(ITokenSource tokenSource, int bufferSize)
{
    this.TokenSource = tokenSource;
    this.tokens = new IToken[bufferSize];
    n = 0;
    // Prime the stream so the first lookahead has a token available.
    Fill(1);
}
/// <summary>Reset this token stream by setting its token source.</summary>
/// <remarks>
/// Clears all buffered tokens and rewinds so the next read refills from the new source.
/// </remarks>
public virtual void SetTokenSource(ITokenSource tokenSource)
{
    this._tokenSource = tokenSource;
    // Discard buffered state tied to the previous source.
    tokens.Clear();
    p = -1;
    this.fetchedEOF = false;
}
// This method gets called by the runtime. Use this method to configure the HTTP request pipeline.
public void Configure(IApplicationBuilder app, IWebHostEnvironment env, ITokenSource tokenSource, ILogger <FakeTokenController> logger)
{
    if (env.IsDevelopment())
    {
        app.UseDeveloperExceptionPage();
    }

    app.UseRouting()
        .UseAuthentication()
        .UseAuthorization()
        .UseEndpoints(endpoints =>
        {
            // gRPC service endpoints plus reflection for tooling.
            endpoints.MapGrpcService <GreeterService>();
            endpoints.MapGrpcService <CustomersMaintenanceService>();
            endpoints.MapGrpcService <PlayDiceService>();
            endpoints.MapGrpcService <InterceptorDemoService>();
            endpoints.MapGrpcService <GrpcAuthDemoServImpl>();
            endpoints.MapGrpcReflectionService();

            // Plain HTTP endpoint issuing a fake JWT for test clients.
            endpoints.MapGet("/jwt/token", async context =>
            {
                var controller = new FakeTokenController(tokenSource, context, logger);
                var result = await controller.GetToken();
                context.Response.StatusCode = result.StatusCode;
                await context.Response.WriteAsync(result.Result);
            });

            // Simple landing/health endpoint.
            endpoints.MapGet("/", async context =>
            {
                await context.Response.WriteAsync("Grpc client is ready for requests");
            });
        });
}
/** <summary>Reset this token stream by setting its token source.</summary> */
public virtual void SetTokenSource(ITokenSource tokenSource)
{
    this.tokenSource = tokenSource;
    // Drop buffered tokens, rewind, and restore the default channel filter.
    tokens.Clear();
    p = -1;
    channel = TokenConstants.DEFAULT_CHANNEL;
}
/// <summary>
/// Stop-word filter: rejects terms present in the stop-word set.
/// </summary>
/// <returns>False when the current term is a stop word; true to keep it.</returns>
public bool ProcessTerm(ITokenSource source)
{
    var term = new ArraySegmentKey<char>(source.Buffer, source.Size);
    return !_stopWords.Contains(term);
}
/** <summary>Reset this token stream by setting its token source.</summary> */
public void SetTokenSource(ITokenSource <SlimToken> tokenSource)
{
    this._tokenSource = tokenSource;
    // Reset buffered state for the new source.
    tokens.Clear();
    p = -1;
    channel = TokenChannels.Default;
}
/// <summary>
/// Computes the character interval covered by <paramref name="context"/>.
/// Returns an empty interval (stop = start - 1) when no stop symbol exists.
/// </summary>
public static Interval GetSourceInterval([NotNull] ParserRuleContext context)
{
    Requires.NotNull(context, nameof(context));

    int startIndex = context.Start.StartIndex;
    IToken stopSymbol = GetStopSymbol(context);
    if (stopSymbol == null)
    {
        // No stop token: empty interval anchored at the start.
        return new Interval(startIndex, startIndex - 1);
    }

    int stopIndex;
    if (stopSymbol.Type != TokenConstants.Eof)
    {
        stopIndex = stopSymbol.StopIndex;
    }
    else
    {
        // At EOF the stop token carries no useful index; fall back to the end
        // of the underlying character stream when one is available.
        ITokenSource tokenSource = context.Start.TokenSource;
        ICharStream inputStream = tokenSource != null ? tokenSource.InputStream : null;
        stopIndex = inputStream != null ? inputStream.Size - 1 : startIndex - 1;
    }

    // Never yield an interval ending before (startIndex - 1).
    return new Interval(startIndex, Math.Max(stopIndex, startIndex - 1));
}
/// <summary>
/// Invokes the configured query delegate, or returns an empty result list
/// when no delegate has been assigned.
/// </summary>
public List<Result> ExecuteQueryFunc(Query query, ITokenSource cancelToken)
{
    var handler = QueryFunc;
    if (handler == null)
    {
        return new List<Result>();
    }
    return handler(query, cancelToken);
}
/// <summary>
/// Computes the character interval covered by <paramref name="context"/>.
/// Returns an empty interval (stop = start - 1) when no stop symbol exists.
/// </summary>
public static Interval GetSourceInterval(ParserRuleContext context)
{
    Contract.Requires <ArgumentNullException>(context != null, "context");

    int startIndex = context.Start.StartIndex;
    IToken stopSymbol = GetStopSymbol(context);
    if (stopSymbol == null)
    {
        // No stop token: empty interval anchored at the start.
        return new Interval(startIndex, startIndex - 1);
    }

    int stopIndex;
    if (stopSymbol.Type != TokenConstants.Eof)
    {
        stopIndex = stopSymbol.StopIndex;
    }
    else
    {
        // At EOF fall back to the end of the underlying character stream,
        // when one is available.
        ITokenSource tokenSource = context.Start.TokenSource;
        ICharStream inputStream = tokenSource != null ? tokenSource.InputStream : null;
        stopIndex = inputStream != null ? inputStream.Size - 1 : startIndex - 1;
    }

    // Clamp so the interval never ends before (startIndex - 1).
    return new Interval(startIndex, Math.Max(stopIndex, startIndex - 1));
}
/** <summary>Reset this token stream by setting its token source.</summary> */
public virtual void SetTokenSource(ITokenSource tokenSource)
{
    this._tokenSource = tokenSource;
    // Clear buffered tokens and rewind; restore the default channel filter.
    tokens.Clear();
    p = -1;
    channel = TokenChannels.Default;
}
/// <summary>Reset this token stream by setting its token source.</summary>
public virtual void SetTokenSource(ITokenSource tokenSource)
{
    this._tokenSource = tokenSource;
    // Rewind and drop buffered state; channel 0 is the default channel here.
    this.tokens.Clear();
    this.p = -1;
    this.channel = 0;
}
/// <summary>
/// Lower-cases the current term in place (invariant culture); always keeps the term.
/// </summary>
public bool ProcessTerm(ITokenSource source)
{
    char[] buffer = source.Buffer;
    int size = source.Size;
    for (int i = 0; i < size; i++)
    {
        buffer[i] = char.ToLowerInvariant(buffer[i]);
    }
    return true;
}
/// <summary>
/// Creates a buffered stream over the given token source.
/// </summary>
/// <param name="tokenSource">The token source; must not be null.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="tokenSource"/> is null.</exception>
public BufferedTokenStream(ITokenSource tokenSource)
{
    if (tokenSource == null)
    {
        // The original passed the message text as ArgumentNullException's first
        // argument, which is the ParamName; use the (paramName, message) overload.
        throw new ArgumentNullException(nameof(tokenSource), "tokenSource cannot be null");
    }
    this._tokenSource = tokenSource;
}
/// <summary>
/// Lower-cases the current term in place (invariant culture); always keeps the term.
/// </summary>
public bool ProcessTerm(ITokenSource source)
{
    int index = 0;
    while (index < source.Size)
    {
        source.Buffer[index] = char.ToLowerInvariant(source.Buffer[index]);
        index++;
    }
    return true;
}
/// <summary>
/// Builds a synthetic token of the expected type via the source's token factory,
/// positioned at the current token's line and column.
/// </summary>
protected internal virtual IToken ConstructToken(ITokenSource tokenSource, int expectedTokenType, string tokenText, IToken current)
{
    ITokenFactory factory = tokenSource.TokenFactory;
    // Pair the requested source with the char stream of the current token's source.
    var sourcePair = Tuple.Create(tokenSource, current.TokenSource.InputStream);
    return factory.Create(
        sourcePair,
        expectedTokenType,
        tokenText,
        TokenConstants.DefaultChannel,
        -1,   // start index unknown for a synthesized token
        -1,   // stop index unknown for a synthesized token
        current.Line,
        current.Column);
}
/// <summary>
/// Runs the term through every filter in order; stops and rejects at the
/// first filter that returns false.
/// </summary>
public bool Process(string field, ITokenSource source)
{
    foreach (var filter in _filters)
    {
        if (!filter.ProcessTerm(source))
        {
            return false;
        }
    }
    return true;
}
/// <summary>
/// Sends a request over the pipe and exposes the server's responses as an observable.
/// Returns an empty observable when the pipe is not connected.
/// </summary>
public IObservable <Response> Send(
    [NotNull] Request request,
    CancellationToken token = default(CancellationToken))
{
    OverlappingPipeClientStream stream = _stream;
    if (_state != PipeState.Connected || stream == null)
    {
        // Not connected: complete immediately with no responses.
        // ReSharper disable once AssignNullToNotNullAttribute
        return(Observable.Empty <Response>());
    }
    // ReSharper disable once AssignNullToNotNullAttribute
    return(Observable.Create <Response>(
        async(observer, t) =>
        {
            Debug.Assert(observer != null);
            // Link the caller's token with the subscription's token so either can cancel.
            using (ITokenSource tokenSource = token.CreateLinked(t))
            {
                token = tokenSource.Token;
                // Track the in-flight command so incoming responses can be routed to it.
                ConnectedCommand cr = new ConnectedCommand(request, observer);
                _commandRequests.TryAdd(request.ID, cr);
                try
                {
                    await stream.WriteAsync(request.Serialize(), token).ConfigureAwait(false);
                    await cr.CompletionTask.WithCancellation(token).ConfigureAwait(false);
                }
                // NOTE(review): this swallows write/wait failures entirely; presumably
                // errors are surfaced to the observer via ConnectedCommand — confirm.
                // ReSharper disable once EmptyGeneralCatchClause
                catch { }
                // If the command is not explicitly cancelled and is still running, and we've been cancelled
                // then ask the server to cancel.
                if (!cr.IsCancelled && !cr.IsCompleted && token.IsCancellationRequested)
                {
                    try
                    {
                        using (CancellationTokenSource cts = Constants.FireAndForgetTokenSource)
                            await CancelCommand(request.ID, cts.Token).ConfigureAwait(false);
                    }
                    catch (TaskCanceledException) { }
                }
                // Remove the command request.
                _commandRequests.TryRemove(request.ID, out cr);
            }
        }));
}
/// <summary>
/// Stop-word filter: rejects terms present in the stop-word set.
/// </summary>
/// <returns>False when the current term is a stop word; true to keep it.</returns>
public bool ProcessTerm(ITokenSource source)
{
    var term = new ArraySegmentKey <char>(source.Buffer, source.Size);
    return !_stopWords.Contains(term);
}
/// <summary>
/// Long-running job body: sleeps in five 5-second slices, honouring
/// cancellation between slices, then registers the AutoMapper mapping.
/// </summary>
public void Run(ITokenSource tokenSource)
{
    for (int slice = 0; slice < 5; slice++)
    {
        // Bail out promptly if the host requested cancellation.
        tokenSource.Token.ThrowIfCancellationRequested();
        Thread.Sleep(5000);
    }
    AutoMapper.Mapper.CreateMap<Class1, Class2>();
}
/// <summary>
/// Runs the term through every filter in order; rejects at the first
/// filter that returns false.
/// </summary>
public bool Process(string field, ITokenSource source)
{
    foreach (var filter in _filters)
    {
        if (filter.ProcessTerm(source) == false)
        {
            return false;
        }
    }
    return true;
}
/// <summary>
/// Parses the given token source using the specified grammar, starting with
/// expression with the given name.
/// </summary>
/// <param name="expressionType">The type of the expression to start parsing.</param>
/// <param name="tokenSource">The source of tokens.</param>
public MatchResult Parse(string expressionType, ITokenSource tokenSource)
{
    if (tokenSource == null)
    {
        throw new ArgumentNullException("tokenSource");
    }
    // Resolve the start expression by name and run one match attempt.
    Expression expression = grammar.Expression(expressionType);
    var attempt = new ParseAttempt(this, tokenSource);
    return expression.Match(attempt, String.Empty);
}
/// <summary>The preferred method of getting a tree pattern.</summary>
/// <remarks>
/// The preferred method of getting a tree pattern. For example, here's a
/// sample use:
/// <pre>
/// ParseTree t = parser.expr();
/// ParseTreePattern p = parser.compileParseTreePattern("&lt;ID&gt;+0", MyParser.RULE_expr);
/// ParseTreeMatch m = p.match(t);
/// String id = m.get("ID");
/// </pre>
/// </remarks>
public virtual ParseTreePattern CompileParseTreePattern(string pattern, int patternRuleIndex)
{
    var tokenStream = (ITokenStream)InputStream;
    if (tokenStream != null)
    {
        // A pattern needs a lexer to tokenize it; borrow the stream's source if it is one.
        Lexer lexer = tokenStream.TokenSource as Lexer;
        if (lexer != null)
        {
            return CompileParseTreePattern(pattern, patternRuleIndex, lexer);
        }
    }
    throw new NotSupportedException("Parser can't discover a lexer to use");
}
/// <summary>
/// Determines whether <paramref name="token"/> ends exactly at a line boundary.
/// Prefers asking the lexer's char stream; falls back to snapshot line offsets.
/// </summary>
protected virtual bool TokenEndsAtEndOfLine(ITextSnapshot snapshot, ITokenSource lexer, IToken token)
{
    var lexerImpl = lexer as Lexer;
    if (lexerImpl != null)
    {
        // The lexer is positioned right after the token: peek the next character.
        int next = lexerImpl.CharStream.LA(1);
        return next == '\r' || next == '\n';
    }

    // No lexer available; compare the token's end offset against the line boundaries.
    int end = token.StopIndex + 1;
    ITextSnapshotLine line = snapshot.GetLineFromPosition(end);
    return line.End <= end && line.EndIncludingLineBreak >= end;
}
/// <summary>
/// Create a caret token without container token
/// </summary>
/// <param name="source">The token source the caret token belongs to.</param>
/// <param name="stream">The character stream backing the token.</param>
/// <param name="position">Zero-width caret position: used as both start and stop index.</param>
public CaretToken(
    ITokenSource source,
    ICharStream stream,
    int position)
    : base(
        new Antlr4.Runtime.Sharpen.Tuple <ITokenSource, ICharStream>(source, stream),
        TokenType,
        Lexer.DefaultTokenChannel,
        position,
        position)
{
    // No enclosing token: sentinel values mark "no parent".
    ParentToken = null;
    ParentOffset = -1;
}
/// <summary>
/// Determines whether <paramref name="token"/> spans more than one line.
/// Short-circuits via the lexer's current line when it proves a single-line token.
/// </summary>
protected virtual bool IsMultilineToken(ITextSnapshot snapshot, ITokenSource lexer, IToken token)
{
    var lexerImpl = lexer as Lexer;
    if (lexerImpl != null && lexerImpl.Line >= token.Line)
    {
        // The lexer has not advanced past the token's starting line.
        return false;
    }

    int firstLine = snapshot.GetLineNumberFromPosition(token.StartIndex);
    int lastLine = snapshot.GetLineNumberFromPosition(token.StopIndex + 1);
    return firstLine != lastLine;
}
/// <summary>
/// Initializes a new instance of the <see cref="DurableHttpRequest"/> class.
/// </summary>
/// <param name="method">Method used for HTTP request.</param>
/// <param name="uri">Uri used to make the HTTP request.</param>
/// <param name="headers">Headers added to the HTTP request.</param>
/// <param name="content">Content added to the body of the HTTP request.</param>
/// <param name="tokenSource">AAD authentication attached to the HTTP request.</param>
/// <param name="asynchronousPatternEnabled">Specifies whether the DurableHttpRequest should handle the asynchronous pattern.</param>
public DurableHttpRequest(
    HttpMethod method,
    Uri uri,
    IDictionary <string, StringValues> headers = null,
    string content = null,
    ITokenSource tokenSource = null,
    bool asynchronousPatternEnabled = true)
{
    this.Method = method;
    this.Uri = uri;
    // Headers are defensively copied so later caller mutations don't leak in.
    this.Headers = HttpHeadersConverter.CreateCopy(headers);
    this.Content = content;
    this.TokenSource = tokenSource;
    this.AsynchronousPatternEnabled = asynchronousPatternEnabled;
}
/// <summary>
/// Strips English possessive suffixes from the term in place:
/// a trailing apostrophe ("boys'") or a trailing "'s" ("boy's").
/// Always keeps the term.
/// </summary>
public bool ProcessTerm(ITokenSource source)
{
    int size = source.Size;
    if (size <= 2)
    {
        return true;
    }

    char last = source.Buffer[size - 1];
    if (last == '\'')
    {
        // remove "boys' ball" suffix '
        source.Size = size - 1;
    }
    else if (last == 's' && source.Buffer[size - 2] == '\'')
    {
        // remove "boy's ball" suffix 's
        source.Size = size - 2;
    }
    return true;
}
/// <summary>
/// Parses the incoming HTML token stream, rewrites it via the visitor, and
/// returns the edited tokens as a new token source sharing the original factory.
/// </summary>
public ITokenSource Edit(ITokenSource tokens)
{
    var stream = new CommonTokenStream(tokens);
    var parser = new HtmlParser(stream);
    stream.Fill();

    visitor.Visit(parser.htmlDocument(), stream.GetTokens());

    var edited = new ListTokenSource(visitor.GetResult())
    {
        TokenFactory = tokens.TokenFactory
    };
    return edited;
}
/// <summary>
/// Parses the incoming JS token stream, rewrites it via the visitor, and
/// returns the edited tokens as a new token source sharing the original factory.
/// </summary>
public virtual ITokenSource Edit(ITokenSource tokens)
{
    var stream = new CommonTokenStream(tokens);
    var parser = new JsParser(stream);
    stream.Fill();

    visitor.Visit(parser.program(), stream.GetTokens());

    var edited = new ListTokenSource(visitor.GetResult())
    {
        TokenFactory = tokens.TokenFactory
    };
    return edited;
}
/// <summary>
/// Strips English possessive suffixes from the term in place:
/// a trailing apostrophe ("boys'") or a trailing "'s" ("boy's").
/// Always keeps the term.
/// </summary>
public bool ProcessTerm(ITokenSource source)
{
    if (source.Size <= 2)
    {
        return true;
    }

    char tail = source.Buffer[source.Size - 1];
    if (tail == '\'')
    {
        // remove "boys' ball" suffix '
        source.Size--;
    }
    else if (tail == 's' && source.Buffer[source.Size - 2] == '\'')
    {
        // remove "boy's ball" suffix 's
        source.Size -= 2;
    }
    return true;
}
/// <summary>
/// Parses the incoming JS token stream, rewrites string literals via a fresh
/// visitor bound to the factory names, and returns the edited tokens as a new
/// token source sharing the original factory.
/// </summary>
public ITokenSource Edit(ITokenSource tokens)
{
    var stream = new CommonTokenStream(tokens);
    var parser = new JsParser(stream);
    stream.Fill();

    IVisitorTree rewriter = new JsVisitorChangeLiteralString(factoryNames, tokens.TokenFactory);
    rewriter.Visit(parser.program(), stream.GetTokens());

    var edited = new ListTokenSource(rewriter.GetResult())
    {
        TokenFactory = tokens.TokenFactory
    };
    return edited;
}
/// <summary>
/// Parses the given token source using the specified grammar, starting with
/// expression with the given name.
/// </summary>
/// <param name="expressionType">The type of the expression to start parsing.</param>
/// <param name="tokenSource">The source of tokens.</param>
public MatchResult Parse(string expressionType, ITokenSource tokenSource)
{
    if (tokenSource == null)
    {
        throw new ArgumentNullException("tokenSource");
    }

    Expression expression = grammar.Expression(expressionType);
    var attempt = new ParseAttempt(this, tokenSource);
    MatchResult result = expression.Match(attempt, String.Empty);

    // A match that leaves unconsumed tokens is not a complete parse.
    if (result.IsMatch && attempt.GetToken() != null)
    {
        result.IsMatch = false;
    }
    return result;
}
/// <summary>
/// Parses the given token source using the specified grammar, starting with
/// expression with the given name.
/// </summary>
/// <param name="expressionType">The type of the expression to start parsing.</param>
/// <param name="tokenSource">The source of tokens.</param>
public MatchResult Parse(string expressionType, ITokenSource tokenSource)
{
    if (tokenSource == null)
    {
        throw new ArgumentNullException("tokenSource");
    }

    Expression start = grammar.Expression(expressionType);
    var attempt = new ParseAttempt(this, tokenSource);
    MatchResult result = start.Match(attempt, String.Empty);

    // Reject matches that left trailing tokens unconsumed.
    bool hasTrailing = attempt.GetToken() != null;
    if (result.IsMatch && hasTrailing)
    {
        result.IsMatch = false;
    }
    return result;
}
/// <summary>
/// Returns Google search-suggestion results for the query, serving from the
/// in-memory cache when possible and otherwise kicking off a background fetch.
/// </summary>
private List<Result> Query(Query query, ITokenSource cancelToken)
{
    if (query.KeywordComplete && query.HasArguments)
    {
        // check the cache for a matching result
        var cacheKey = query.Arguments;
        var cachedSuggestions = MemoryCache.Default.Get(cacheKey) as List<string>;
        // if cached result is found, return it.
        if (cachedSuggestions != null)
        {
            // convert the list of suggestion strings to a List<Result>
            if (cachedSuggestions.Any())
            {
                var results = cachedSuggestions.Select(suggestion => new Result
                {
                    Title = suggestion,
                    Icon = _icon,
                    SubTitle = "Search google for " + suggestion,
                    Launch = query1 =>
                    {
                        Process.Start($"http://google.co.uk/search?q={suggestion}");
                        AppCommands.HideWindow();
                    }
                }).ToList();
                return results;
            }
            // no suggestions were received from the server
            return new List<Result> { new Result { Title = "No search suggestions found.", Icon = _icon } };
        }
        // Cache miss, begin the background query to fill the cache
        // create a local cancel token for passing to httpclient..
        // NOTE(review): this default CancellationToken can never be cancelled, so the
        // Register callback below appears to be dead code — confirm the intent; it also
        // looks inverted (it cancels cancelToken when cancellable fires, not vice versa).
        var cancellable = new CancellationToken();
        cancellable.Register(() => { cancelToken.Cancel(); cancelToken.Dispose(); });
        // NOTE(review): fire-and-forget task — result is discarded and exceptions are
        // unobserved here; presumably GetSuggestionsAsync populates MemoryCache itself.
        var x = GetSuggestionsAsync(query.Arguments, cancellable);
        return new List<Result> { new Result { Title = "Retrieving search suggestions...", Icon = _icon } };
    }
    // otherwise the query has not been provided yet, running the action will autocomplete the query
    return new List<Result>
    {
        new Result
        {
            Title = "Search Google",
            SubTitle = "Search Google with Suggestions",
            Icon = _icon,
            Launch = query1 => AppCommands.RewriteQuery(Keyword + ' ')
        }
    };
}
/// <summary>
/// Creates an unbuffered stream over <paramref name="tokenSource"/> with the
/// default initial buffer capacity of 256 tokens.
/// </summary>
public UnbufferedTokenStream(ITokenSource tokenSource)
    : this(tokenSource, 256)
{
}
/// <summary>
/// Creates a rewrite stream over the given source, filtering to the given
/// channel, and initializes the rewrite-program bookkeeping.
/// </summary>
public TokenRewriteStream(ITokenSource tokenSource, int channel)
    : base(tokenSource, channel)
{
    Init();
}
/// <summary>
/// Creates a rewrite stream over the given source and initializes the
/// rewrite-program bookkeeping.
/// </summary>
public TokenRewriteStream(ITokenSource tokenSource)
    : base(tokenSource)
{
    Init();
}
/// <summary>
/// Creates the stream by delegating to the base token-stream constructor.
/// </summary>
public AntlrParserTokenStream(ITokenSource tokenSource)
    : base(tokenSource)
{
}
/// <summary>
/// Creates an action parser over the lexer with lookahead k = 2 and binds it
/// to the template being parsed.
/// </summary>
public ActionParser( ITokenSource lexer, StringTemplate self )
    : this(lexer, 2)
{
    this.self = self;
}
/// <summary>
/// Creates the stream over the given source, filtering tokens to the
/// default channel.
/// </summary>
public CommonTokenStream(ITokenSource tokenSource)
    : this(tokenSource, TokenChannels.Default)
{
}
/// <summary>
/// Constructs a new
/// <see cref="CommonTokenStream"/>
/// using the specified token
/// source and the default token channel (
/// <see cref="TokenConstants.DefaultChannel"/>
/// ).
/// </summary>
/// <param name="tokenSource">The token source.</param>
public CommonTokenStream(ITokenSource tokenSource)
    : base(tokenSource)
{
    // Channel defaults are handled by the base/derived initialization.
}
/// <summary>
/// Constructs a new
/// <see cref="CommonTokenStream"/>
/// using the specified token
/// source and filtering tokens to the specified channel. Only tokens whose
/// <see cref="IToken.Channel()"/>
/// matches
/// <code>channel</code>
/// or have the
/// <see cref="IToken.Type()"/>
/// equal to
/// <see cref="TokenConstants.Eof"/>
/// will be returned by the
/// token stream lookahead methods.
/// </summary>
/// <param name="tokenSource">The token source.</param>
/// <param name="channel">The channel to use for filtering tokens.</param>
public CommonTokenStream(ITokenSource tokenSource, int channel)
    : this(tokenSource)
{
    this.channel = channel;
}
/// <summary>
/// Creates the removable stream by delegating to the base token-stream constructor.
/// </summary>
public TokenStreamRemovable(ITokenSource tokenSource)
    : base(tokenSource)
{
}
/// <summary>
/// Creates the removable stream over the given source, filtering to the given channel.
/// </summary>
public TokenStreamRemovable(ITokenSource tokenSource, int channel)
    : base(tokenSource, channel)
{
}
/// <summary>
/// Creates the stream with default settings and attaches the given token source.
/// </summary>
public LegacyCommonTokenStream(ITokenSource tokenSource)
    : this()
{
    this._tokenSource = tokenSource;
}
/** <summary>Reset this token stream by setting its token source.</summary> */
public virtual void SetTokenSource( ITokenSource tokenSource )
{
    this._tokenSource = tokenSource;
    // Flush buffered tokens, rewind, and restore the default channel filter.
    tokens.Clear();
    p = -1;
    channel = TokenChannels.Default;
}
/// <summary>
/// Creates an action parser over the lexer; wraps it in a CommonTokenStream.
/// The lookahead parameter <paramref name="k"/> is accepted for signature
/// compatibility but not used here.
/// </summary>
protected ActionParser( ITokenSource lexer, int k )
    : this(new CommonTokenStream( lexer ))
{
}
/// <summary>
/// Wraps an existing token source for HTML building, keeping the language
/// definitions and an empty HTML accumulation buffer.
/// </summary>
public HtmlBuilderTokenSource(ITokenSource source, Definitions definitions)
{
    this.source = source;
    this.definitions = definitions;
    this.htmlBuffer = new StringBuilder();
}
/// <summary>
/// Creates a buffered stream over the given source with an initial capacity
/// of 100 tokens and the read pointer rewound (-1 = nothing fetched yet).
/// </summary>
public BufferedTokenStream(ITokenSource tokenSource)
{
    this._tokens = new List<IToken>(100);
    this._p = -1;
    this._tokenSource = tokenSource;
}
/// <summary>
/// Builds a synthetic token of the expected type via the source's token factory,
/// positioned at the current token's line and column.
/// </summary>
protected internal virtual IToken ConstructToken(ITokenSource tokenSource, int expectedTokenType, string tokenText, IToken current)
{
    ITokenFactory factory = tokenSource.TokenFactory;
    // Pair the requested source with the char stream of the current token's source;
    // -1/-1 mark the start/stop indices as unknown for a synthesized token.
    var sourcePair = Tuple.Create(tokenSource, current.TokenSource.InputStream);
    return factory.Create(
        sourcePair,
        expectedTokenType,
        tokenText,
        TokenConstants.DefaultChannel,
        -1,
        -1,
        current.Line,
        current.Column);
}
/// <summary>
/// Creates a buffered stream over the given token source.
/// </summary>
public BufferedTokenStream(ITokenSource tokenSource)
{
    this._tokenSource = tokenSource;
}
/// <summary>
/// Creates the stream over the given source, filtering tokens to the given channel.
/// </summary>
public CommonTokenStream(ITokenSource tokenSource, int channel)
    : base(tokenSource)
{
    this._channel = channel;
}
/// <summary>
/// Creates the stream by delegating to the base token-stream constructor.
/// </summary>
public NadirTokenStream(ITokenSource tokenSource)
    : base(tokenSource)
{
}
/// <summary>
/// Creates the stream over the given source, filtering to the given channel.
/// </summary>
public NadirTokenStream(ITokenSource tokenSource, int channel)
    : base(tokenSource, channel)
{
}
/// <summary>
/// Creates a token factory bound to a text snapshot, pairing the effective
/// source with its input stream for token construction.
/// </summary>
public SnapshotTokenFactory(ITextSnapshot snapshot, ITokenSource effectiveSource)
{
    this.snapshot = snapshot;
    this.effectiveSource = Tuple.Create(effectiveSource, effectiveSource.InputStream);
}