/// <summary>
/// Nancy module exposing token-based authentication endpoints under "/auth".
/// </summary>
public AuthModule(ITokenizer tokenizer) : base("/auth")
{
    // POST /auth — validate the posted credentials and issue a token.
    Post["/"] = x =>
    {
        var userName = (string)this.Request.Form.UserName;
        var password = (string)this.Request.Form.Password;

        var userIdentity = UserDatabase.ValidateUser(userName, password);
        if (userIdentity == null)
        {
            return HttpStatusCode.Unauthorized;
        }

        var token = tokenizer.Tokenize(userIdentity, Context);
        return new { Token = token };
    };

    // GET /auth/validation — succeeds only for authenticated callers.
    Get["/validation"] = _ =>
    {
        this.RequiresAuthentication();
        return "Yay! You are authenticated!";
    };

    // GET /auth/admin — succeeds only for callers holding the "admin" claim.
    Get["/admin"] = _ =>
    {
        this.RequiresClaims(new[] { "admin" });
        return "Yay! You are authorized!";
    };
}
/// <summary>
/// Decorates an <see cref="ITokenizer"/> with a pre-processing and a post-processing step.
/// </summary>
/// <param name="tokenizer">The tokenizer being decorated.</param>
/// <param name="preprocessor">Step applied before tokenization.</param>
/// <param name="postprocessor">Step applied after tokenization.</param>
public TokenizerProcessingDecorator(ITokenizer tokenizer, Preprocessor preprocessor, Postprocessor postprocessor)
{
    this.tokenizer = tokenizer;
    this.preprocessor = preprocessor;
    this.postprocessor = postprocessor;
}
/// <summary>
/// Parses the given Lua code into an IParseItem tree.
/// </summary>
/// <param name="input">The Lua code to parse.</param>
/// <param name="name">The name of the chunk, used for exceptions.</param>
/// <param name="hash">The hash of the Lua code, can be null.</param>
/// <returns>The code as an IParseItem tree.</returns>
/// <remarks>Simply calls Parse(Tokenizer, string, bool) with force:false.</remarks>
public IParseItem Parse(ITokenizer input, string name, string hash)
{
    if (input == null)
        throw new ArgumentNullException("input");

    // Return a previously parsed chunk when caching is enabled and the hash is known.
    if (UseCache)
    {
        lock (_lock)
        {
            // TryGetValue avoids the ContainsKey + indexer double lookup inside the lock.
            IParseItem cached;
            if (_cache != null && hash != null && _cache.TryGetValue(hash, out cached))
                return cached;
        }
    }

    // Parse the chunk; any token left after the block means trailing garbage.
    Token temp = new Token();
    IParseItem read = ReadBlock(input, ref temp);
    Token end = Read(input, ref temp);
    if (end.Value != null)
        throw new SyntaxException(string.Format(Resources.TokenEOF, end.Value), input.Name, end);

    // Store the loaded chunk in the cache.
    lock (_lock)
    {
        if (_cache != null && hash != null)
            _cache[hash] = read;
    }
    return read;
}
/// <summary>
/// Controller handling home-installation quote requests (captcha-protected e-mail form).
/// All collaborators are injected and stored for use by the action methods.
/// </summary>
public HomeInstallationQuoteController(
    IWorkContext workContext,
    ISettingService settingService,
    IGenericAttributeService genericAttributeService,
    ILocalizationService localizationService,
    IMessageTokenProvider messageTokenProvider,
    IEmailAccountService emailAccountService,
    IEventPublisher eventPublisher,
    IMessageTemplateService messageTemplateService,
    ITokenizer tokenizer,
    IQueuedEmailService queuedEmailService,
    IProductService productService,
    CaptchaSettings captchaSettings,
    EmailAccountSettings emailAccountSettings)
{
    _workContext = workContext;
    _settingService = settingService;
    _genericAttributeService = genericAttributeService;
    _localizationService = localizationService;
    _messageTokenProvider = messageTokenProvider;
    _emailAccountService = emailAccountService;
    _eventPublisher = eventPublisher;
    _messageTemplateService = messageTemplateService;
    _tokenizer = tokenizer;
    _queuedEmailService = queuedEmailService;
    _productService = productService;
    _captchaSettings = captchaSettings;
    _emailAccountSettings = emailAccountSettings;
}
/// <summary>
/// Message service for MobSocial notifications; wires up all messaging collaborators.
/// </summary>
public MobSocialMessageService(IMessageTemplateService messageTemplateService,
    IStoreService storeService,
    IMessageTokenProvider messageTokenProvider,
    ILanguageService languageService,
    IStoreContext storeContext,
    IEventPublisher eventPublisher,
    ITokenizer tokenizer,
    IQueuedEmailService queuedEmailService,
    IEmailAccountService emailAccountService,
    EmailAccountSettings emailAccountSettings,
    ILocalizationService localizationService,
    MessageTemplatesSettings messageTemplateSettings,
    CatalogSettings catalogSettings,
    IProductAttributeParser productAttributeParser,
    IWorkContext workContext)
{
    _messageTemplateService = messageTemplateService;
    _storeService = storeService;
    _messageTokenProvider = messageTokenProvider;
    _languageService = languageService;
    _storeContext = storeContext;
    _eventPublisher = eventPublisher;
    _tokenizer = tokenizer;
    _queuedEmailService = queuedEmailService;
    _emailAccountService = emailAccountService;
    _emailAccountSettings = emailAccountSettings;
    _localizationService = localizationService;
    _messageTemplateSettings = messageTemplateSettings;
    _catalogSettings = catalogSettings;
    _productAttributeParser = productAttributeParser;
    _workContext = workContext;
}
// NOTE(review): this endpoint issues a token for a brand-new, empty UserIdentity
// without validating the posted credentials — anyone who POSTs here receives a
// valid token. Presumably demo/sample code; confirm before any production use.
public AuthModule(ITokenizer tokenizer) : base("/auth")
{
    Post["/"] = x =>
    {
        // Form values are read but never checked against any user store.
        var userName = this.Request.Form.UserName;
        var password = this.Request.Form.Password;
        var userIdentity = new UserIdentity();
        var token = tokenizer.Tokenize(userIdentity, Context);
        return new { Token = token };
    };
    // Succeeds only for authenticated callers.
    Get["/validation"] = _ =>
    {
        this.RequiresAuthentication();
        return "Yay! You are authenticated!";
    };
    // Succeeds only for callers holding the "admin" claim.
    Get["/admin"] = _ =>
    {
        this.RequiresClaims(new[] { "admin" });
        return "Yay! You are authorized!";
    };
}
/// <summary>
/// Wires up all collaborators used to build and queue workflow e-mail messages.
/// </summary>
// NOTE(review): "downloadServioce" is a typo for "downloadService". The backing
// field (declared elsewhere in this class) carries the same typo, and renaming
// the parameter could break callers using named arguments — flagging only.
public WorkflowMessageService(
    IMessageTemplateService messageTemplateService,
    IQueuedEmailService queuedEmailService,
    ILanguageService languageService,
    ITokenizer tokenizer,
    IEmailAccountService emailAccountService,
    IMessageTokenProvider messageTokenProvider,
    IStoreService storeService,
    IStoreContext storeContext,
    EmailAccountSettings emailAccountSettings,
    IEventPublisher eventPublisher,
    IWorkContext workContext,
    HttpRequestBase httpRequest,
    IDownloadService downloadServioce)
{
    this._messageTemplateService = messageTemplateService;
    this._queuedEmailService = queuedEmailService;
    this._languageService = languageService;
    this._tokenizer = tokenizer;
    this._emailAccountService = emailAccountService;
    this._messageTokenProvider = messageTokenProvider;
    this._storeService = storeService;
    this._storeContext = storeContext;
    this._emailAccountSettings = emailAccountSettings;
    this._eventPublisher = eventPublisher;
    this._workContext = workContext;
    this._httpRequest = httpRequest;
    this._downloadServioce = downloadServioce;
}
/// <summary>
/// Asserts that the tokenizer yields exactly the given tokens, in order.
/// </summary>
public static void AssertTokens(ITokenizer tokenizer, params Token[] tokens)
{
    for (int i = 0; i < tokens.Length; i++)
    {
        Assertion.AssertEquals(tokens[i], tokenizer.NextToken());
    }
}
/// <summary>
/// Auth endpoints: login view, logout, credential POST, and forgotten-password request.
/// </summary>
public AuthModule(IUserManagementApiClient client, ITokenizer tokenizer)
{
    Get["/login"] = parameters => View["Index"];

    Get["/logout"] = parameters => this.Logout("/");

    Post["/login"] = parameters =>
    {
        var model = this.Bind<AuthModel>();

        // Forward the credentials to the user-management API and detokenize the result.
        var token = client.Post("", "/login", null, null, new[]
        {
            new KeyValuePair<string, string>("Username", model.Username),
            new KeyValuePair<string, string>("Password", model.Password)
        });
        var userIdentity = tokenizer.Detokenize(token, Context, new DefaultUserIdentityResolver());
        Context.CurrentUser = userIdentity;
        return token;
    };

    Get["/forgotPassword/{username}"] = _ =>
    {
        // Trigger a password-reset request for the named user and show the result.
        ViewBag.Message = client.Put("", UserManagementApiRoute.User.RequestResetPassword,
            new[] { new KeyValuePair<string, string>("username", _.username + "") }, null, null);
        return View["Login"];
    };
}
/// <summary>
/// Authentication endpoints: first-run setup probe and token login.
/// </summary>
public AuthModule(IConfiguration configuration, IAuthenticationManager authenticationManager, ITokenizer tokenizer) : base("auth")
{
    // True while no user name has been configured yet (first-run setup pending).
    Get["/setup"] = _ => string.IsNullOrEmpty(configuration.UserName);

    Post["/login"] = _ =>
    {
        var loginParameters = this.Bind<LoginParameters>();

        // The very first login establishes the stored credentials.
        if (string.IsNullOrEmpty(configuration.UserName))
        {
            SetAuth(configuration, loginParameters);
        }

        if (!authenticationManager.IsValid(loginParameters.UserName, loginParameters.Password))
        {
            return HttpStatusCode.Unauthorized;
        }

        var identity = new UserIdentity(loginParameters.UserName, null);
        var token = tokenizer.Tokenize(identity, Context);
        return new { Token = token };
    };
}
/// <summary>
/// Constructs a new lexer for the C language.
/// </summary>
/// <param name="tokenizer">An object to return generic tokens.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="tokenizer"/> is null.</exception>
public CLexer(ITokenizer<TokenType> tokenizer)
{
    if (tokenizer == null)
        throw new ArgumentNullException(nameof(tokenizer)); // nameof is refactor-safe vs. a magic string
    this.localTokenizer = tokenizer;
}
/// <summary>
/// Workflow message service variant that also sends SMS notifications.
/// </summary>
public WorkflowMessageService(IMessageTemplateService messageTemplateService,
    IQueuedEmailService queuedEmailService,
    ILanguageService languageService,
    ITokenizer tokenizer,
    IEmailAccountService emailAccountService,
    IMessageTokenProvider messageTokenProvider,
    IStoreService storeService,
    IStoreContext storeContext,
    EmailAccountSettings emailAccountSettings,
    IEventPublisher eventPublisher,
    ISMSSender smsSender,
    SMSSettings smsSettings)
{
    this._messageTemplateService = messageTemplateService;
    this._queuedEmailService = queuedEmailService;
    this._languageService = languageService;
    this._tokenizer = tokenizer;
    this._emailAccountService = emailAccountService;
    this._messageTokenProvider = messageTokenProvider;
    this._storeService = storeService;
    this._storeContext = storeContext;
    this._emailAccountSettings = emailAccountSettings;
    this._eventPublisher = eventPublisher;
    this._smsSender = smsSender;
    this._smsSettings = smsSettings;
}
/// <summary>
/// Gets a Hashtable of words and integers representing the number of each word.
/// </summary>
/// <param name="input">The string to get the word frequency of.</param>
/// <param name="caseSensitive">True if words should be treated as separate if they have different casing.</param>
/// <param name="tokenizer">A instance of ITokenizer.</param>
/// <param name="stopWordProvider">An instance of IStopWordProvider; may be null to skip stop-word filtering.</param>
/// <returns>A Hashtable mapping each counted word to its occurrence count.</returns>
public static Hashtable GetWordFrequency(string input, bool caseSensitive, ITokenizer tokenizer, IStopWordProvider stopWordProvider)
{
    // NOTE(review): culture-sensitive ToLower preserved — confirm invariant lowering
    // is not required before changing it.
    string convertedInput = caseSensitive ? input : input.ToLower();

    // Sorting lets CountWords (presumably) locate runs of equal words efficiently.
    string[] words = tokenizer.Tokenize(convertedInput);
    Array.Sort(words);
    string[] uniqueWords = GetUniqueWords(words);

    Hashtable result = new Hashtable();
    for (int i = 0; i < uniqueWords.Length; i++)
    {
        // Hoist the repeated array indexing into a local.
        string word = uniqueWords[i];
        if (stopWordProvider == null || (IsWord(word) && !stopWordProvider.IsStopWord(word)))
        {
            int count = CountWords(word, words);
            // uniqueWords should already be distinct; the ContainsKey branch is defensive.
            if (result.ContainsKey(word))
                result[word] = (int)result[word] + count;
            else
                result.Add(word, count);
        }
    }
    return result;
}
/// <summary>
/// Authentication endpoints; the very first login also creates the user account.
/// </summary>
public AuthModule(ILogger<AuthModule> logger, IUserManager userManager, ITokenizer tokenizer) : base("auth")
{
    // Setup is needed while no users exist yet.
    Get["/setup"] = _ => !userManager.HasUsers();

    Post["/login"] = _ =>
    {
        var userData = this.Bind<UserDto>();

        // First login creates user
        if (!userManager.HasUsers())
        {
            logger.Info("Creating user account {UserName}.", userData.UserName);
            userManager.CreateUser(userData.UserName, userData.Password);
        }

        var user = userManager.GetUser(userData.UserName, userData.Password);
        if (user == null)
        {
            logger.Warn("Invalid username/password: {UserName}.", userData.UserName);
            return HttpStatusCode.Unauthorized;
        }

        var identity = new UserIdentity(user.UserName, user.Claims);
        return new { Token = tokenizer.Tokenize(identity, Context) };
    };
}
/// <summary>
/// Token-issuing auth endpoints, including an OPTIONS handler for CORS preflight.
/// </summary>
public AuthModule(ITokenizer tokenizer) : base("/auth")
{
    // Answer preflight requests with a plain 200.
    Options["/"] = o => HttpStatusCode.OK;

    Post["/"] = x =>
    {
        var userName = (string)this.Request.Form.UserName;
        var password = (string)this.Request.Form.Password;

        var userIdentity = UserDatabase.ValidateUser(userName, password);
        if (userIdentity == null)
        {
            return HttpStatusCode.Unauthorized;
        }

        return new { Token = tokenizer.Tokenize(userIdentity, Context) };
    };
}
/// <summary>
/// Produces names by tokenizing input, transforming the tokens, then optimizing the result.
/// </summary>
public TokenizedNameProvider(ITokenizer tokenizer, List<ITokenTransformer> tokenTransformers, List<IStringNameOptimizer> optimizers)
{
    _tokenizer = tokenizer;
    _tokenTransformers = tokenTransformers;
    _optimizers = optimizers;
}
// Reads the tokens that make up one parameter value and returns it, quoting
// multi-token unquoted values (e.g. Guids, which NH supplies as bare
// alphanumerics with dashes) so the consumer sees a single quoted literal.
private string ProcessParameter(ITokenizer tokenizer, string parameterName)
{
    int tokenCount = 0;
    string token = "";
    // Whitespace must be visible here so it is preserved inside the value.
    tokenizer.SkipWhiteSpace = false;
    do
    {
        // BlockedText (comments etc.) is excluded from the accumulated value.
        if (tokenizer.Current.Type != TokenType.BlockedText)
        {
            token +=
                // TODO: the code below will not work until the parser can retain embedded comments
                //String.Format("/* {0} */ ", parameterName) +
                tokenizer.Current.Value;
            // Only non-whitespace tokens count toward the multi-token check.
            if (tokenizer.Current.Type != TokenType.WhiteSpace)
                tokenCount++;
        }
        tokenizer.ReadNextToken();
    } while (tokenizer.HasMoreTokens && !tokenizer.IsNextToken(Constants.Comma));
    // Restore the tokenizer's normal whitespace handling.
    tokenizer.SkipWhiteSpace = true;
    // Multi-token values that are not already quoted are quoted and upper-cased.
    return tokenCount > 1 && !token.StartsWith("'")
        ? String.Format("'{0}'", token.Trim().ToUpper())
        : token;
}
/// <summary>
/// Auth endpoints: token issuance and user registration.
/// </summary>
public AuthModule(ITokenizer tokenizer, IAuthService authService) : base("auth")
{
    this.tokenizer = tokenizer;
    this.authService = authService;

    // POST auth/token — authenticate and return a token response.
    Post["/token"] = x =>
    {
        var request = this.CustomBindAndValidate<AuthenticateUserRequest>();
        var identity = GetUserIdentity(request);
        var response = GetAuthenticateResponse(identity);
        return Negotiate
            .WithStatusCode(HttpStatusCode.OK)
            .WithModel(response);
    };

    // POST auth/register — create a new account.
    Post["/register"] = x =>
    {
        var request = this.CustomBindAndValidate<RegisterUserRequest>();
        authService.Register(request);
        return Negotiate
            .WithStatusCode(HttpStatusCode.OK)
            .WithModel(new RegisterUserResponse());
    };
}
/// <summary>
/// Auth endpoints: credential login (username or e-mail) and current-user lookup.
/// </summary>
public AuthModule(ITokenizer tokenizer, Users users) : base("/auth")
{
    Post["/"] = x =>
    {
        var request = this.Bind<AuthRequest>();
        if (string.IsNullOrEmpty(request.Identifier))
        {
            return HttpStatusCode.Unauthorized;
        }

        var user = users.ValidateUsernameOrEmail(request.Identifier, request.Password);
        if (user == null)
        {
            return HttpStatusCode.Unauthorized;
        }

        var token = tokenizer.Tokenize(new UserIdentity(user), Context);
        return new { Token = token, Username = user.Username, UserId = user.Id };
    };

    // GET /auth — returns the authenticated caller's identity.
    Get["/"] = _ =>
    {
        this.RequiresAuthentication();
        return Context.CurrentUser;
    };
}
/// <summary>
/// Wires up the services used to build and queue workflow e-mails.
/// </summary>
public WorkflowMessageService(IEmailTemplateService emailTemplateService, IQueuedEmailService queuedEmailService, IEmailAccountService emailAccountService, ITokenizer tokenizer)
{
    // Consistency fix: _tokenizer previously lacked the "this." qualifier used by
    // every other assignment, and emailAccountService was assigned out of
    // parameter order.
    this._emailTemplateService = emailTemplateService;
    this._queuedEmailService = queuedEmailService;
    this._emailAccountService = emailAccountService;
    this._tokenizer = tokenizer;
}
/// <summary>
/// Auth endpoint backed by an e-mail lookup; the POST route runs asynchronously.
/// </summary>
public AuthModule(ITokenizer tokenizer, GetByEmail getByEmail) : base("/auth")
{
    _tokenizer = tokenizer;
    _getByEmail = getByEmail;

    // true => async route handler.
    Post["/", true] = Auth;
}
/// <summary>
/// Create a custom analyzer using only a tokenizer.
/// </summary>
/// <param name="name">Sets the name of the analyzer.</param>
/// <param name="tokenizer">Sets the tokenizer of the analyzer.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="tokenizer"/> is null.</exception>
public CustomAnalyzer(string name, ITokenizer tokenizer) : base(name, AnalyzerTypeEnum.Custom)
{
    if (tokenizer == null)
        // nameof is refactor-safe; the message is unchanged.
        throw new ArgumentNullException(nameof(tokenizer), "CustomAnalyzer requires a tokenizer.");
    Tokenizer = tokenizer;
}
/// <summary>
/// Builds the test container and resolves the tokenizer under test.
/// </summary>
public void Init()
{
    var containerBuilder = new ContainerBuilder();
    containerBuilder.RegisterType<Tokenizer>().As<ITokenizer>();
    containerBuilder.RegisterType<TokenManager>().As<ITokenManager>();
    containerBuilder.RegisterType<TestTokenProvider>().As<ITokenProvider>();

    _container = containerBuilder.Build();
    _tokenizer = _container.Resolve<ITokenizer>();
}
/// <summary>
/// Initializes a new instance of the <see cref="AbstractAnalyzer" /> using the specified tokenizer and the analyzer weight.
/// </summary>
/// <param name="tokenizer">The tokenizer used by this analyzer.</param>
/// <param name="weight">The analyzer weight.</param>
/// <exception cref="System.ArgumentNullException">tokenizer</exception>
public TokenizerAnalyzer(ITokenizer tokenizer, float weight) : base(weight)
{
    if (tokenizer == null)
        throw new ArgumentNullException(nameof(tokenizer)); // nameof is refactor-safe vs. a magic string
    Tokenizer = tokenizer;
    // Pre-processing is enabled by default.
    PreProcess = true;
}
/// <summary>
/// Consumes the closing quote (without adding it to the token) and hands control
/// to a fresh NewToken state.
/// </summary>
private void ChangeState(ITokenizer tokenizer)
{
    // Discard the closing quote character.
    tokenizer.StrmReader.Read();

    IState next = NewToken.Instance();
    next.Read(tokenizer);
}
/// <summary>
/// Creates a new filter that will filter off any
/// tokens that match the regular expression passed
/// as argument.
/// </summary>
/// <param name="previous">the previous tokenizer in the chain</param>
/// <param name="regex">the regular expression</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="regex"/> is null.</exception>
public RegexTokenFilter(ITokenizer previous, string regex) : base(previous)
{
    // Straight null comparison and nameof instead of Yoda condition + magic string.
    if (regex == null)
    {
        throw new ArgumentNullException(nameof(regex), "regex can't be null!");
    }
    _regex = new Regex(regex);
}
/// <summary>
/// Matches terms against an encyclopedia file using a sparse-distance metric.
/// </summary>
public SparseMatcher(ISparseDistance distance, IReworder reworder, IReader reader, ITokenizer tokenizer, string encyclopediaFilePath)
{
    // Assignments follow parameter order.
    _distance = distance;
    _reworder = reworder;
    _reader = reader;
    _tokenizer = tokenizer;
    _encyclopediaFilePath = encyclopediaFilePath;
}
/// <summary>
/// Asserts that the filtered tokenizer yields exactly the given tokens, in order.
/// </summary>
public static void AssertTokens(ITokenizer tokenizer, ITokenFilter filter, params Token[] tokens)
{
    ITokenizer filtered = filter.Clone(tokenizer);
    for (int i = 0; i < tokens.Length; i++)
    {
        Assertion.AssertEquals(tokens[i], filtered.NextToken());
    }
}
/// <summary>
/// Auth endpoints mounted under "smack/auth".
/// </summary>
public AuthModule(IUserRepository userRepository, ITokenizer tokenizer, IPasswordHasher passwordHasher) : base("smack/auth")
{
    _userRepository = userRepository;
    _tokenizer = tokenizer;
    _passwordHasher = passwordHasher;

    Post["/login"] = x => Login();
}
/// <summary>
/// Asserts that the tokenizer yields tokens with exactly the given values, then is exhausted.
/// </summary>
public static void AssertTokenValues(ITokenizer tokenizer, params string[] expectedValues)
{
    for (int i = 0; i < expectedValues.Length; i++)
    {
        Assertion.AssertEquals(expectedValues[i], tokenizer.NextToken().Value);
    }
    // The stream must have no tokens left.
    Assertion.AssertNull(tokenizer.NextToken());
}
/// <summary>
/// Drives display/editor shapes for the Projection content part.
/// </summary>
public ProjectionPartDriver(
    IOrchardServices services,
    IRepository<QueryPartRecord> queryRepository,
    IProjectionManager projectionManager,
    IFeedManager feedManager,
    ITokenizer tokenizer,
    IDisplayHelperFactory displayHelperFactory,
    IWorkContextAccessor workContextAccessor)
{
    Services = services;
    _queryRepository = queryRepository;
    _projectionManager = projectionManager;
    _feedManager = feedManager;
    _tokenizer = tokenizer;
    _displayHelperFactory = displayHelperFactory;
    _workContextAccessor = workContextAccessor;

    T = NullLocalizer.Instance;
}
/// <summary>
/// Keeps the full-text index up to date by re-indexing changes reported by the file watcher.
/// </summary>
public Reindexer(
    IFullTextIndex fullTextIndex,
    IQueryParser queryParser,
    ITokenizer tokenizer,
    IFileWatcherService fileWatcherService,
    IDisposable disposable
    )
{
    this.fullTextIndex = fullTextIndex;
    this.queryParser = queryParser;
    this.tokenizer = tokenizer;
    this.fileWatcherService = fileWatcherService;
    this.disposable = disposable;

    // Change notifications are processed on the thread pool, off the caller's thread.
    // The subscription handle is retained — presumably disposed with this instance;
    // TODO(review): confirm it is disposed to avoid leaking the subscription.
    indexChangesSubscription = fileWatcherService.Changes
        .ObserveOn(ThreadPoolScheduler.Instance)
        .Subscribe(IndexChanges);
}
// Parses an UPDATE statement into a SqlUpdateNode.
private ISqlNode ParseUpdateStatement(ITokenizer t)
{
    // "UPDATE" <TopClause>? "SET" <SetList> <WhereClause>?
    var updateToken = t.Expect(SqlTokenType.Keyword, "UPDATE");
    // TODO: TOP clause
    // Target may be an aliased table name or a variable.
    var table = ParseMaybeAliasedScalar(t, ParseVariableOrObjectIdentifier);
    var setList = ParseUpdateSetClause(t);
    // TODO: RETURNING clause
    // WHERE is optional; ParseWhereClause presumably returns null when absent.
    var where = ParseWhereClause(t);
    return(new SqlUpdateNode
    {
        Location = updateToken.Location,
        Source = table,
        SetClause = setList,
        WhereClause = where
    });
}
/// <summary>
/// ternaryExpression -> expression ? ternaryExpression : ternaryExpression
/// </summary>
private static double ComputeTernaryExpression(ITokenizer tokenizer)
{
    var ternaryExpression = ComputeExpression(tokenizer);
    if (!tokenizer.Match(TokenType.QuestionMark))
    {
        // No '?': the plain expression value is the result.
        return(ternaryExpression);
    }
    // Both branches are always evaluated: the tokenizer must consume each branch's
    // tokens even though only one value is ultimately selected below.
    var truthlyDouble = ComputeTernaryExpression(tokenizer);
    if (!tokenizer.Match(TokenType.Colon))
    {
        // NOTE(review): a bare Exception is thrown; callers catching a more specific
        // type will not see it. Left as-is to preserve the existing contract.
        throw new Exception("Expected :.");
    }
    var falsyDouble = ComputeTernaryExpression(tokenizer);
    // The condition is "truthy" when strictly greater than zero.
    return(ternaryExpression > 0 ? truthlyDouble : falsyDouble);
}
/// <summary>
/// Builds a single composite query part from a token, folding all parsed parts
/// left-to-right. Throws when the token yields no parts at all.
/// </summary>
private static IQueryPart CreateQueryPart(QueryToken queryToken, ITokenizer wordTokenizer)
{
    var queryParts = CreateQueryParts(queryToken, wordTokenizer).ToList();
    if (queryParts.Count == 0)
    {
        throw new QueryParserException(ExceptionMessages.ExpectedAtLeastOneQueryPartParsed);
    }

    // Fold the remaining parts onto the first one.
    var composed = queryParts[0];
    foreach (var next in queryParts.Skip(1))
    {
        composed = ComposePart(composed, next);
    }
    return composed;
}
/// <inheritdoc />
public IQuery Parse(IIndexedFieldLookup fieldLookup, string queryText, ITokenizer tokenizer)
{
    if (fieldLookup is null)
    {
        throw new ArgumentNullException(nameof(fieldLookup));
    }

    // Fold each token from the query text into the growing query-part tree.
    IQueryPart? rootPart = null;
    var parserState = new QueryParserState(queryText);
    while (parserState.TryGetNextToken(out var token))
    {
        rootPart = CreateQueryPart(fieldLookup, parserState, token, tokenizer, rootPart);
    }

    // An empty query maps to the empty query part rather than null.
    return new Query(rootPart ?? EmptyQueryPart.Instance);
}
/// <summary>
/// Service that builds and queues campaign e-mails.
/// </summary>
public CampaignService(ICustomerService customerService,
    IEmailSender emailSender,
    IEventPublisher eventPublisher,
    IMessageTokenProvider messageTokenProvider,
    IQueuedEmailService queuedEmailService,
    IRepository<Campaign> campaignRepository,
    IStoreContext storeContext,
    ITokenizer tokenizer)
{
    _customerService = customerService;
    _emailSender = emailSender;
    _eventPublisher = eventPublisher;
    _messageTokenProvider = messageTokenProvider;
    _queuedEmailService = queuedEmailService;
    _campaignRepository = campaignRepository;
    _storeContext = storeContext;
    _tokenizer = tokenizer;
}
/// <summary>
/// Coordinates tokenized queries, filters, sort criteria, layouts and properties for projections.
/// </summary>
public ProjectionManager(
    ITokenizer tokenizer,
    IEnumerable<IFilterProvider> filterProviders,
    IEnumerable<ISortCriterionProvider> sortCriterionProviders,
    IEnumerable<ILayoutProvider> layoutProviders,
    IEnumerable<IPropertyProvider> propertyProviders,
    IContentManager contentManager,
    IRepository<QueryPartRecord> queryRepository)
{
    _tokenizer = tokenizer;
    _filterProviders = filterProviders;
    _sortCriterionProviders = sortCriterionProviders;
    _layoutProviders = layoutProviders;
    _propertyProviders = propertyProviders;
    _contentManager = contentManager;
    _queryRepository = queryRepository;

    T = NullLocalizer.Instance;
}
/// <summary>
/// Creates a session factory rooted at <paramref name="dir"/>; loads the index in
/// the background while the key map loads synchronously, then blocks until done.
/// </summary>
public SessionFactory(string dir, ITokenizer tokenizer, IConfigurationService config)
{
    Dir = dir;
    _log = Logging.CreateWriter("sessionfactory");

    // Kick off the index load concurrently with the key-map load below.
    var tasks = new Task[1];
    tasks[0] = LoadIndex();

    _keys = LoadKeyMap();
    _tokenizer = tokenizer;
    _config = config;

    // Append-only stream for the key map; shared read/write so other handles can
    // read the file while this one appends.
    _writableKeyMapStream = new FileStream(
        Path.Combine(dir, "_.kmap"),
        FileMode.Append,
        FileAccess.Write,
        FileShare.ReadWrite);

    // Block construction until the index has finished loading.
    Task.WaitAll(tasks);
}
/// <summary>
/// Session that warms a collection by submitting queries through a bounded worker queue.
/// </summary>
public WarmupSession(
    string collectionName,
    ulong collectionId,
    SessionFactory sessionFactory,
    ITokenizer tokenizer,
    IConfigurationProvider config,
    string baseUrl) : base(collectionName, collectionId, sessionFactory)
{
    _config = config;
    _tokenizer = tokenizer;
    // Worker count comes from the "write_thread_count" config entry; each worker
    // drains the queue by calling SubmitQuery.
    _httpQueue = new ProducerConsumerQueue<string>(
        int.Parse(_config.Get("write_thread_count")),
        callback: SubmitQuery);
    _postingsReader = new RemotePostingsReader(_config, collectionName);
    // NOTE(review): a new HttpClient per session — fine if sessions are long-lived;
    // confirm sessions are not created per request (socket-exhaustion risk).
    _http = new HttpClient();
    _baseUrl = baseUrl;

    this.Log("initiated warmup session");
}
// Parses a markdown header: a Header token followed by formatted text up to the
// end of the line. Returns a failed result when the current token is not a header.
private MarkdownParsingResult<INode> ParseHeader(ITokenizer<IMdToken> tokenizer)
{
    // Fail fast unless the current token carries the Header attribute.
    var header = tokenizer.Match(token => token.Has(Md.Header));
    // Bound the tokenizer to the current line (stop at NewLine/Break) and skip
    // the whitespace between the header marker and the header text.
    var boundedTokenizer = header.IfSuccess(t =>
    {
        var bounded = t.UntilNotMatch(token => token.HasAny(Md.NewLine, Md.Break));
        return(SkipWhiteSpaces(bounded));
    });
    var headerContent = boundedTokenizer.IfSuccess(t => ParseNodesUntilMatch(t, ParseFormattedText));
    if (headerContent.Succeed)
    {
        // The header level equals the length of the matched marker text.
        return(headerContent.Remainder.UnboundTokenizer().SuccessWith<INode>(
            new HeaderNode(header.Parsed.Text.Length, headerContent.Parsed)
        ));
    }
    return(tokenizer.Fail<INode>());
}
/// <summary>
/// Workflow message service with Cameleo-specific extensions.
/// </summary>
// NOTE(review): every dependency is both forwarded to the base constructor and
// re-assigned to fields here. If these fields shadow protected base fields the
// duplication is redundant (though harmless) — confirm against the base class.
public CameleoWorkflowMessageService(IMessageTemplateService messageTemplateService,
    IQueuedEmailService queuedEmailService,
    ITokenizer tokenizer,
    IEmailAccountService emailAccountService,
    IMessageTokenProvider messageTokenProvider,
    ILanguageService languageService,
    IStoreService storeService,
    IStoreContext storeContext,
    EmailAccountSettings emailAccountSettings,
    IEventPublisher eventPublisher,
    IWorkContext workContext)
    : base(messageTemplateService, queuedEmailService, languageService, tokenizer, emailAccountService,
        messageTokenProvider, storeService, storeContext, emailAccountSettings, eventPublisher)
{
    this._messageTemplateService = messageTemplateService;
    this._queuedEmailService = queuedEmailService;
    this._languageService = languageService;
    this._tokenizer = tokenizer;
    this._emailAccountService = emailAccountService;
    this._messageTokenProvider = messageTokenProvider;
    this._storeService = storeService;
    this._storeContext = storeContext;
    this._workContext = workContext;
    this._emailAccountSettings = emailAccountSettings;
    this._eventPublisher = eventPublisher;
}
// Parses a single call argument: an expression optionally followed by a separator.
// Returns null when no expression could be parsed. ("Parsuj" is Polish for "Parse",
// "Wyrazenie" for "Expression".)
public new static Argument Parsuj(ITokenizer tokenizer)
{
    Argument argument = new Argument();
    // Parse the inner expression; no expression means no argument.
    argument.argument = Wyrazenie.Parsuj(tokenizer);
    if (argument.argument == null)
    {
        return(null);
    }
    // Consume the trailing comma or closing parenthesis; push back anything else.
    HCPSToken token = tokenizer.Next();
    if (token != HCPSToken.CommaSeparator && token != HCPSToken.ParenthesisClose)
    {
        tokenizer.Back();
    }
    // Validate the parsed argument.
    argument.Check();
    return(argument);
}
/// <summary>
/// Test endpoints: a default POST and a dungeon-simulation GET that loads three
/// fixed players, forms a party, and runs the simulation to completion.
/// </summary>
public TestModule(ITokenizer tokenizer) : base("/test")
{
    Post["/"] = parameters => "default";

    Get["/simulation"] = parameters =>
    {
        ContentsPrepare.Init();

        // The three fixed test players; previously the load sequence was
        // copy-pasted once per id.
        string[] playerIds = { "102", "103", "104" };
        Player[] player = { new Player(), new Player(), new Player() };
        for (int i = 0; i < playerIds.Length; i++)
        {
            var data = PlayerDataDatabase.GetPlayerData(playerIds[i]);
            if (data != null)
            {
                player[i].LoadPlayer(data);
            }
        }

        Party users = new Party(PartyType.PLAYER, 10);
        foreach (Player p in player)
        {
            users.AddCharacter(p);
        }

        DungeonMaster newMaster = new DungeonMaster();
        newMaster.Init(60, 4, users);
        return "turn : " + newMaster.Start();
    };
}
/// <summary>
/// Parses a MERGE statement:
/// "MERGE" "INTO"? &lt;Target&gt; "USING" &lt;Source&gt; "ON" &lt;Condition&gt; (WHEN clause)+
/// </summary>
public SqlMergeNode ParseMergeStatement(ITokenizer t)
{
    var mergeToken = t.Expect(SqlTokenType.Keyword, "MERGE");
    var mergeNode = new SqlMergeNode
    {
        Location = mergeToken.Location
    };
    // TODO: "TOP" <maybeParenVariableOrNumericExpression> "PERCENT"?
    // "INTO" is optional (third arg true = consume if present).
    t.NextIs(SqlTokenType.Keyword, "INTO", true);
    mergeNode.Target = ParseMaybeAliasedTable(t, ParseObjectIdentifier);
    t.Expect(SqlTokenType.Keyword, "USING");
    mergeNode.Source = ParseMaybeAliasedTable(t, ParseObjectIdentifier);
    t.Expect(SqlTokenType.Keyword, "ON");
    mergeNode.MergeCondition = ParseBooleanExpression(t);
    // Consume WHEN [NOT] MATCHED [BY SOURCE|TARGET] clauses until none remain.
    while (true)
    {
        var whenClauseToken = t.MaybeGetKeywordSequence("WHEN", "NOT", "MATCHED", "BY", "SOURCE", "TARGET");
        if (whenClauseToken == null)
        {
            break;
        }
        // TODO: "AND" <clauseSearchCondition>
        t.Expect(SqlTokenType.Keyword, "THEN");
        if (whenClauseToken.Value == "WHEN MATCHED")
        {
            // TODO: Allow multiple
            mergeNode.Matched = ParseMergeMatched(t);
        }
        else if (whenClauseToken.Value == "WHEN NOT MATCHED" || whenClauseToken.Value == "WHEN NOT MATCHED BY TARGET")
        {
            mergeNode.NotMatchedByTarget = ParseMergeNotMatched(t);
        }
        else if (whenClauseToken.Value == "WHEN NOT MATCHED BY SOURCE")
        {
            // TODO: Allow multiple
            mergeNode.NotMatchedBySource = ParseMergeMatched(t);
        }
    }
    // TODO: Output clause
    // TODO: OPTION clause
    return(mergeNode);
}
// Converts a delta-style document (its "ops" array) into per-segment text pieces.
// Consecutive string inserts carrying the same "segment" attribute are concatenated;
// a TextSegment is emitted each time the segment ref changes, plus one final flush.
private static IEnumerable<TextSegment> GetSegments(ITokenizer<string, int, string> wordTokenizer, BsonDocument doc)
{
    string prevRef = null;
    var sb = new StringBuilder();
    var ops = (BsonArray)doc["ops"];
    foreach (BsonDocument op in ops.Cast<BsonDocument>())
    {
        // skip embeds (non-string inserts)
        if (!op.TryGetValue("insert", out BsonValue value) || value.BsonType != BsonType.String)
        {
            continue;
        }
        // Ops without attributes, or without a "segment" attribute, are ignored.
        if (!op.TryGetValue("attributes", out BsonValue attrsValue))
        {
            continue;
        }
        BsonDocument attrs = attrsValue.AsBsonDocument;
        if (!attrs.TryGetValue("segment", out BsonValue segmentValue))
        {
            continue;
        }
        string curRef = segmentValue.AsString;
        if (prevRef != null && prevRef != curRef)
        {
            // Segment ref changed: flush the accumulated text as one segment.
            yield return(CreateSegment(wordTokenizer, prevRef, sb.ToString()));
            sb.Clear();
        }
        string text = value.AsString;
        sb.Append(text);
        prevRef = curRef;
    }
    // Flush the final segment, if any text was accumulated.
    if (prevRef != null)
    {
        yield return(CreateSegment(wordTokenizer, prevRef, sb.ToString()));
    }
}
/// <summary>
/// ternaryExpression -> expression ( '?' ternaryExpression ':' ternaryExpression )?
/// </summary>
private Node ParseTernaryExpression(ITokenizer tokenizer)
{
    var condition = ParseExpression(tokenizer);
    if (!tokenizer.Match(TokenType.QuestionMark))
    {
        // No '?' — just a plain expression.
        return condition;
    }

    var whenTrue = ParseTernaryExpression(tokenizer);
    if (!tokenizer.Match(TokenType.Colon))
    {
        // A missing ':' is reported as an error node in the falsy position.
        return new TernaryNode(condition, whenTrue, new ErrorNode("Expected :."));
    }

    var whenFalse = ParseTernaryExpression(tokenizer);
    return new TernaryNode(condition, whenTrue, whenFalse);
}
/// <summary>
/// Extracts the name-identifier claim value from the request's Authorization
/// token, or null when the request carries no Authorization header.
/// </summary>
/// <exception cref="ArgumentNullException">When tokenizer or request is null.</exception>
public static string IdentifierFromClaim(this ITokenizer tokenizer, HttpRequestMessage request)
{
    // Validate all arguments up front: previously a null tokenizer went undetected
    // whenever the Authorization header was absent (the method returned null first).
    if (tokenizer == null)
    {
        throw new ArgumentNullException(nameof(tokenizer));
    }
    if (request == null)
    {
        throw new ArgumentNullException(nameof(request));
    }

    if (request.Headers.Authorization == null)
    {
        return null;
    }

    var principal = tokenizer.Principal(request.Headers.Authorization.Parameter);
    return principal.FindFirst("http://schemas.xmlsoap.org/ws/2005/05/identity/claims/nameidentifier")?.Value;
}
/// <summary>
/// Parses either a variable token or a qualified identifier; throws when neither matches.
/// </summary>
private ISqlNode ParseVariableOrQualifiedIdentifier(ITokenizer t)
{
    var next = t.GetNext();

    // <Variable>
    if (next.IsType(SqlTokenType.Variable))
    {
        return new SqlVariableNode(next);
    }

    // Not a variable: push the token back and try a qualified identifier instead.
    t.PutBack(next);
    var identifier = ParseQualifiedIdentifier(t);
    if (identifier != null)
    {
        return identifier;
    }

    throw ParsingException.CouldNotParseRule(nameof(ParseVariableOrQualifiedIdentifier), next);
}
/// <summary>
/// Mobile query endpoint returning the list of dismissal reasons ("motivos de
/// despido"), encrypted with a key derived from the caller's token.
/// </summary>
public MotivoDespidoModuleQueryMovil(ISerializeJsonRequest serializerJson, Func<string, IEncrytRequestProvider> encryptRequestProvider, ITokenizer tokenizer, IMotivoDespidoRepositoryReadOnly readOnlyRepository)
{
    Post["/movil/motivosDespido"] = parameters =>
    {
        var request = this.Bind<MovilRequest>();

        // The mobile token must resolve to a user id; otherwise the call is rejected.
        var userId = tokenizer.Detokenize(request.token, Context);
        if (userId == null)
        {
            return new Response().WithStatusCode(HttpStatusCode.Unauthorized);
        }

        var reasons = readOnlyRepository.getAll();
        var reasonsJson = serializerJson.toJson(reasons);

        // Encrypt the JSON payload using the caller's token.
        return encryptRequestProvider(request.token).encryptString(reasonsJson);
    };
}
/// <summary>
/// Controller for projection item administration.
/// </summary>
public ItemController(
    IOrchardServices orchardServices,
    IContentManager contentManager,
    ITransactionManager transactionManager,
    IShapeFactory shapeFactory,
    IRulesManager rulesManager,
    ITokenizer tokenizer,
    IWorkflowManager workflowManager)
{
    Services = orchardServices;
    Shape = shapeFactory;
    _contentManager = contentManager;
    _transactionManager = transactionManager;
    _rulesManager = rulesManager;
    _tokenizer = tokenizer;
    _workflowManager = workflowManager;

    T = NullLocalizer.Instance;
    Logger = NullLogger.Instance;
}
// Parses the optional ORDER BY clause of a SELECT; returns null when absent.
private SqlOrderByNode ParseSelectOrderByClause(ITokenizer t)
{
    // "ORDER" "BY" <OrderTerm>+ ("OFFSET" <NumberOrVariable> "ROWS")? ("FETCH" "NEXT" <NumberOrVariable> "ROWS" "ONLY")?
    // NOTE: the OFFSET/FETCH portions of the grammar are not parsed yet.
    if (!t.NextIs(SqlTokenType.Keyword, "ORDER"))
    {
        return(null);
    }
    // Consume "ORDER" (its location anchors the node), then require "BY".
    var orderByToken = t.GetNext();
    t.Expect(SqlTokenType.Keyword, "BY");
    var orderByItems = ParseList(t, ParseOrderTerm);
    return(new SqlOrderByNode
    {
        Location = orderByToken.Location,
        Entries = orderByItems
    });
}
/// <summary>
/// Tokenizes the highlighted text (using a tokenizer appropriate for the
/// selected language, and initiates the ParseThread to parse the tokenized
/// text.
/// </summary>
public virtual void Parse()
{
    // Nothing to do for an empty pane.
    if (textPane.GetText().Length == 0)
    {
        return;
    }
    // use endIndex+1 because substring subtracts 1
    string text = Sharpen.Runtime.Substring(textPane.GetText(), startIndex, endIndex + 1).Trim();
    if (parser != null && text.Length > 0)
    {
        //Tokenizer<? extends HasWord> toke = tlp.getTokenizerFactory().getTokenizer(new CharArrayReader(text.toCharArray()));
        ITokenizer<IHasWord> toke = tlp.GetTokenizerFactory().GetTokenizer(new StringReader(text));
        IList<IHasWord> wordList = toke.Tokenize();
        // Parsing runs on a background thread with a progress monitor.
        parseThread = new ParserPanel.ParseThread(this, wordList);
        parseThread.Start();
        StartProgressMonitor("Parsing", ParseTime);
    }
}
/// <summary>
/// Resolves an alias token against the tokenizer's anchor table.
/// Returns null when the current token is not an alias; throws when the
/// referenced anchor is unknown.
/// </summary>
private YAlias GetAliasValueDependent(ITokenizer tokenizer)
{
    if (tokenizer.Current.Value.Kind != TokenKind.Alias)
    {
        return null;
    }

    var anchorName = tokenizer.Current.Value.Value;
    // TryGetValue avoids the ContainsKey + indexer double lookup.
    if (!tokenizer.Anchors.TryGetValue(anchorName, out var anchorValue))
    {
        throw ParseException.Tokenizer(tokenizer, $"Not found anchorName: {anchorName}");
    }

    // Advance past the alias token before returning.
    tokenizer.MoveNext();
    return new YAlias(anchorName, anchorValue);
}
/// <summary>
/// Maintains autoroute aliases for content items.
/// </summary>
public AutorouteService(
    IAliasService aliasService,
    ITokenizer tokenizer,
    IContentDefinitionManager contentDefinitionManager,
    IContentManager contentManager,
    IRouteEvents routeEvents,
    ICultureManager cultureManager,
    IHttpContextAccessor httpContextAccessor,
    IAliasStorage aliasStorage)
{
    // Assignments follow parameter order.
    _aliasService = aliasService;
    _tokenizer = tokenizer;
    _contentDefinitionManager = contentDefinitionManager;
    _contentManager = contentManager;
    _routeEvents = routeEvents;
    _cultureManager = cultureManager;
    _httpContextAccessor = httpContextAccessor;
    _aliasStorage = aliasStorage;
}
// Parses an INSERT statement into a SqlInsertNode.
private ISqlNode ParseInsertStatement(ITokenizer t)
{
    // "INSERT" "INTO" <ObjectIdOrVariable> "(" <ColumnList> ")" <ValuesOrSelect>
    var insertToken = t.Expect(SqlTokenType.Keyword, "INSERT");
    t.Expect(SqlTokenType.Keyword, "INTO");
    var insertNode = new SqlInsertNode
    {
        Location = insertToken.Location,
        Table = ParseObjectIdentifier(t),
        Columns = ParseInsertColumnList(t)
    };
    // TODO: OUTPUT Clause
    // The source may be VALUES, a SELECT, an EXEC/EXECUTE, or DEFAULT VALUES.
    var next = t.Peek();
    if (next.IsKeyword("VALUES"))
    {
        insertNode.Source = ParseValues(t);
    }
    else if (next.IsKeyword("SELECT"))
    {
        insertNode.Source = ParseQueryExpression(t);
    }
    else if (next.IsKeyword("EXEC", "EXECUTE"))
    {
        insertNode.Source = ParseExecute(t);
    }
    else if (next.IsKeyword("DEFAULT"))
    {
        // Consume "DEFAULT" then require "VALUES".
        t.GetNext();
        t.Expect(SqlTokenType.Keyword, "VALUES");
        insertNode.Source = new SqlKeywordNode("DEFAULT VALUES");
    }
    else
    {
        throw new ParsingException("INSERT INTO statement does not have a source");
    }
    return(insertNode);
}
/// <summary>
/// Drives the tokenizer's state machine: peeks at each character of the
/// underlying stream and dispatches to the state that recognizes it.
/// </summary>
/// <param name="tokenizer">The tokenizer whose stream is being scanned.</param>
private void ChangeState(ITokenizer tokenizer)
{
    StreamReader streamReader = tokenizer.StrmReader;
    while (!streamReader.EndOfStream)
    {
        char peekedChar = (char)streamReader.Peek();
        IState nextState = null;

        // the order of these ifs matters -
        // IsIntegerConstant must be checked before Identifier.
        if (this.IsPossibleKeyword(peekedChar))
        {
            nextState = Keyword.Instance();
        }
        else if (this.IsWhiteSpace(peekedChar))
        {
            tokenizer.StrmReader.Read();
            nextState = NewToken.Instance();
        }
        else if (this.IsSymbol(peekedChar))
        {
            nextState = Symbol.Instance();
        }
        else if (this.IsIntegerConstant(peekedChar))
        {
            nextState = IntegerConstant.Instance();
        }
        else if (this.IsPossibleIdentifierCharacter(peekedChar))
        {
            nextState = Identifier.Instance();
        }
        else
        {
            // BUG FIX: previously an unrecognized character was never consumed
            // and nextState stayed null (or stale), so this loop spun forever.
            // Skip the character so scanning can continue.
            streamReader.Read();
            continue;
        }

        // Don't recurse into Read for NewToken - the loop itself advances
        // token-by-token (avoids the StackOverflowException the original
        // commented-out debug code was chasing).
        if (nextState.GetType() != typeof(NewToken))
        {
            nextState.Read(tokenizer);
        }
    }
}
/// <summary>
/// Wires up the collaborators used when updating a ticket, and installs
/// the null localizer/logger defaults.
/// </summary>
public UpdateTicketActivity(
    ITokenizer tokenizer,
    ICRMContentOwnershipService contentOwnershipService,
    IContentManager contentManager,
    IBasicDataService basicDataService,
    IRepository<TicketIdentityRecord> ticketIdentityRecordRepository,
    IActivityStreamService activityStreamService,
    IRepository<ContentItemPermissionDetailRecord> contentItemPermissionDetailRepository)
{
    // Assignments follow parameter order for easy visual verification.
    this.tokenizer = tokenizer;
    this.contentOwnershipService = contentOwnershipService;
    this.contentManager = contentManager;
    this.basicDataService = basicDataService;
    this.ticketIdentityRecordRepository = ticketIdentityRecordRepository;
    this.activityStreamService = activityStreamService;
    this.contentItemPermissionDetailRepository = contentItemPermissionDetailRepository;

    T = NullLocalizer.Instance;
    Logger = NullLogger.Instance;
}
/// <summary>
/// Builds the Autofac container with the token/date/calendar test doubles
/// and resolves the tokenizer and clock used by the tests.
/// </summary>
public void Init()
{
    var containerBuilder = new ContainerBuilder();

    // Core services and stubs.
    containerBuilder.RegisterType<StubOrchardServices>().As<IOrchardServices>();
    containerBuilder.RegisterType<TokenManager>().As<ITokenManager>();
    containerBuilder.RegisterType<Tokenizer>().As<ITokenizer>();
    containerBuilder.RegisterType<DateTokens>().As<ITokenProvider>();
    containerBuilder.RegisterType<StubClock>().As<IClock>();

    // Date/time and calendar infrastructure.
    containerBuilder.RegisterType<CultureDateTimeFormatProvider>().As<IDateTimeFormatProvider>();
    containerBuilder.RegisterType<DefaultDateServices>().As<IDateServices>();
    containerBuilder.RegisterType<StubWorkContextAccessor>().As<IWorkContextAccessor>();
    containerBuilder.RegisterType<SiteCalendarSelector>().As<ICalendarSelector>();
    containerBuilder.RegisterType<DefaultCalendarManager>().As<ICalendarManager>();

    _container = containerBuilder.Build();
    _tokenizer = _container.Resolve<ITokenizer>();
    _clock = _container.Resolve<IClock>();
}
/// <summary>
/// Wires up the collaborators needed to email followers, forwarding the
/// messaging/queue/member repositories to the base activity and installing
/// the null localizer/logger defaults.
/// </summary>
public SendEmailToFollowersActivity(
    IOrchardServices services,
    IMessageService messageService,
    IJobsQueueService jobsQueueService,
    IRepository<EmailTemplateRecord> emailTemplateRepository,
    ITokenizer tokenizer,
    IRepository<BusinessUnitMemberPartRecord> businessUnitMembersRepository,
    IRepository<ActivityStreamRecord> activityStreamRepository,
    IRepository<TeamMemberPartRecord> teamMembersRepository,
    IRepository<UserPartRecord> userRepository)
    : base(messageService, jobsQueueService, businessUnitMembersRepository, teamMembersRepository, userRepository)
{
    // Assignments follow parameter order for easy visual verification.
    this.services = services;
    this.emailTemplateRepository = emailTemplateRepository;
    this.tokenizer = tokenizer;
    this.activityStreamRepository = activityStreamRepository;

    T = NullLocalizer.Instance;
    Logger = NullLogger.Instance;
}