private KeyValuePair<string, ITokenProcessor> LoadProcessor(XElement element)
{
    ITokenProcessor processor = null;
    var name = element.Attribute("name")?.Value;
    var target = element.Attribute("target")?.Value;

    if (string.IsNullOrWhiteSpace(name))
    {
        name = target;
    }
    if (string.IsNullOrWhiteSpace(target))
    {
        target = name;
    }

    switch (target)
    {
        case "integer-export":
            processor = new IntegerExport();
            break;
        case "lower-case":
            processor = new LowerCaseProcessor();
            break;
        case "ticket-export":
            processor = new TicketsExport();
            break;
        case "table-export":
            processor = new TableExport();
            break;
    }

    if (processor != null)
    {
        processor.MLang = this;
        processor.Init(element);
    }

    return new KeyValuePair<string, ITokenProcessor>(name, processor);
}
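// Usage sketch (not from the original source): a configuration element this factory could consume.
// The attribute names come from the code above; the attribute values are assumptions for illustration.
//   <processor name="summary" target="lower-case" />
// Here LoadProcessor(element) would return the pair ("summary", LowerCaseProcessor); if either
// attribute were missing it would fall back to the other, and an unknown target yields a null processor.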
public EmailSender(IEmailService emailService, ITokenProcessor tokenProcessor, IEmailTemplateService emailTemplateService, IUserService userService)
{
    _emailService = emailService;
    _tokenProcessor = tokenProcessor;
    _emailTemplateService = emailTemplateService;
    _userService = userService;
}
/// <summary>
/// Get Postings
/// </summary>
/// <param name="index">Index</param>
/// <param name="processor">Token processor</param>
/// <returns>Postings for the processed term</returns>
public IList<Posting> GetPostings(IIndex index, ITokenProcessor processor)
{
    // Process the term
    List<string> processedTerms = processor.ProcessToken(Term);

    // Get an OR-merged posting list from the index for all terms produced by processing
    return index.GetPostings(processedTerms);
}
public ITokenProcessor CreateTokenProcessor()
{
    ITokenProcessor[] tokenProcessors = new ITokenProcessor[_tokens.Length];

    for (int i = 0; i < _tokens.Length; i++)
    {
        tokenProcessors[i] = _tokens[i].CreateTokenProcessor();
    }

    return new CompositeTokenProcessor(tokenProcessors);
}
public ITokenProcessor CreateTokenProcessor()
{
    ITokenProcessor[] tokenProcessors = new ITokenProcessor[_matchers.Length];

    for (int i = 0; i < _matchers.Length; i++)
    {
        tokenProcessors[i] = _matchers[i].CreateTokenProcessor();
    }

    return new MatchProcessor(tokenProcessors);
}
public UserController(IUnitOfWork unitOfWork, ICryptography cryptography, ITokenProcessor tokenProcessor, TokenConfiguration tokenConfiguration, IRandomGenerator randomGenerator, IEmailer emailer)
    : base(unitOfWork)
{
    _cryptography = cryptography;
    _tokenProcessor = tokenProcessor;
    _randomGenerator = randomGenerator;
    _emailer = emailer;
    _tokenConfiguration = tokenConfiguration;
}
public ChangeProcessor(
    ILogger<ChangeProcessor> logger,
    IQueueContext queueContext,
    ITokenProcessor tokenProcessor,
    IListProcessor listProcessor)
{
    _logger = logger;
    _queueContext = queueContext;
    _tokenProcessor = tokenProcessor;
    _listProcessor = listProcessor;
}
public void Register(ITokenProcessor processor)
{
    if (tokenProcessors.ContainsKey(processor.Token))
    {
        throw new ArgumentException(
            String.Format(@"Duplicate processor. Processor for token '{0}' already registered.", processor.Token));
    }

    tokenProcessors.Add(processor.Token, processor);
}
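// Usage sketch (hedged): the registry and processor variables below are assumptions for illustration;
// only the Register contract above is taken from this snippet.
//   registry.Register(someProcessor);
//   registry.Register(someProcessor);   // throws ArgumentException: a processor for that Token is already registered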
private void ProcessExpression(int index)
{
    string toParse = _expression[index];
    IParseResult tokenParseResult = TokenParser.Parse(toParse);
    Type resultType = tokenParseResult.GetType();

    ITokenProcessor<IParseResult> processor = _tokenProcessorDictionary[resultType];
    Action result = processor.ProcessToken(tokenParseResult, _resultTokens, OutputProcessor);

    if (result != null)
    {
        result.Invoke();
    }
}
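// A hedged sketch of how _tokenProcessorDictionary is presumably populated; the concrete parse-result
// and processor names are assumptions, only the Type-keyed dispatch is taken from the code above.
//   _tokenProcessorDictionary[typeof(NumberParseResult)]   = numberProcessor;
//   _tokenProcessorDictionary[typeof(OperatorParseResult)] = operatorProcessor;
// TokenParser.Parse returns some IParseResult, and its runtime type selects the processor that handles
// that kind of token.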
/// <summary> /// Get Postings /// </summary> /// <param name="index">Index</param> /// <param name="processor">Tokene processor</param> /// <returns></returns> public IList <Posting> GetPostings(IIndex index, ITokenProcessor processor) { //list of posting lists from all query components to be OR-merged List <IList <Posting> > postingLists = new List <IList <Posting> >(); //for each components foreach (IQueryComponent qc in mComponents) { //get a posting list and add it to the collection postingLists.Add(qc.GetPostings(index, processor)); } return(Merge.OrMerge(postingLists)); }
/// <summary> /// Get Postings /// </summary> /// <param name="index">Index</param> /// <param name="processor">Tokene processor</param> /// <returns></returns> public IList<Posting> GetPostings(IIndex index, ITokenProcessor processor) { //A list of posting lists (postings for each term in the phrase) List<IList<Posting>> postingLists = new List<IList<Posting>>(); //Retrieves the postings for the individual terms in the phrase foreach (string term in mTerms) { //Process the term List<string> processedTerms = processor.ProcessToken(term); postingLists.Add(index.GetPositionalPostings(processedTerms)); } //positional merge all posting lists return Merge.PositionalMerge(postingLists); }
/// <summary>
/// Set the processor used to work with the token
/// </summary>
public void SetRequired(ITokenProcessor processor, string successurl)
{
    m_tokenProcessor = processor;

    HttpSessionState session = HttpContext.Current.Session;
    if (session != null)
    {
        if (session["TokenProcessor"] == null)
        {
            session["TokenProcessor"] = processor;
        }
        session["SuccessUrl"] = successurl;
    }
}
public override void OnActionExecuting(ActionExecutingContext context)
{
    IUnitOfWork unitOfWork = (IUnitOfWork)context.HttpContext.RequestServices.GetService(typeof(IUnitOfWork));
    ITokenProcessor tokenProcessor = (ITokenProcessor)context.HttpContext.RequestServices.GetService(typeof(ITokenProcessor));

    string token = context.HttpContext.Request.Headers["authentication"];

    if (!token.IsSet())
    {
        context.HttpContext.Response.StatusCode = (int)HttpStatusCode.Forbidden;
        context.Result = new JsonResult("Access token is not sent with the request. Check your request headers!");
    }
    else
    {
        bool addClaimsToHttpContext = true;

        // TODO: encrypt the whole JWT token with a hardcoded key
        UserTokenDto user = unitOfWork.UserRepository.GetByAccessToken(token);

        if (user == null)
        {
            context.HttpContext.Response.StatusCode = (int)HttpStatusCode.Forbidden;
            context.Result = new JsonResult("User doesn't exist. Check Access Token!");
            addClaimsToHttpContext = false;
        }
        else if (DateTime.Now > user.TokenExpirationDateTime)
        {
            context.HttpContext.Response.StatusCode = (int)HttpStatusCode.Unauthorized;
            context.Result = new JsonResult("Bad access token!");
            addClaimsToHttpContext = false;
        }

        if (addClaimsToHttpContext)
        {
            IEnumerable<Claim> claims = tokenProcessor.GetTokenClaims(token);
            if (claims != null)
            {
                ClaimsIdentity appIdentity = new ClaimsIdentity(claims);
                context.HttpContext.User.AddIdentity(appIdentity);
            }
        }
    }

    base.OnActionExecuting(context);
}
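// Usage sketch (hedged): since the method overrides OnActionExecuting, this class is presumably an MVC
// action filter; it would be applied to protected controllers so the "authentication" header is
// validated before the action runs. The filter and controller names below are assumptions.
//   [ServiceFilter(typeof(TokenAuthenticationFilter))]
//   public class OrdersController : Controller { /* protected actions */ }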
public IList<Posting> GetPostings(IIndex index, ITokenProcessor processor)
{
    // Get postings for the two terms
    List<string> termsFromFirst = processor.ProcessToken(firstTerm);
    List<string> termsFromSecond = processor.ProcessToken(secondTerm);
    IList<Posting> firstPostings = index.GetPositionalPostings(termsFromFirst);
    IList<Posting> secondPostings = index.GetPositionalPostings(termsFromSecond);

    // Positionally merge the postings for every gap (distance) from 1 up to k
    List<IList<Posting>> list = new List<IList<Posting>>();
    for (int i = 1; i <= k; i++)
    {
        list.Add(Merge.PositionalMerge(firstPostings, secondPostings, i));
    }

    // OR-merge all of them
    return Merge.OrMerge(list);
}
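// Illustrative example (not from the snippet): with k = 3, a query like "angels NEAR/3 baseball"
// matches documents where "baseball" occurs 1, 2, or 3 positions after "angels"; each gap i yields one
// positional merge, and Merge.OrMerge unions the per-gap results.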
public string TranslateToken(string originalToken, ITokenProcessor tokenProcessor)
{
    if (originalToken.IndexOf('\\') < 0)
    {
        return originalToken;
    }

    string token = originalToken;

    token = token.Replace("\\n", "\n");
    token = token.Replace("\\r", "\r");
    token = token.Replace("\\t", "\t");
    token = token.Replace("\\\"", "\"");

    if (token.IndexOf("\\u") >= 0)
    {
        token = Regex.Replace(token, @"\\[uU][a-fA-F0-9]{4}",
            m => ((char)uint.Parse(m.Value.Substring(2), NumberStyles.HexNumber)).ToString());
    }

    token = token.Replace("\\\\", "\\");

    return token;
}
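// Illustrative examples of the unescaping above (not from the original source):
//   backslash + 'n'           -> newline
//   backslash + 't'           -> tab
//   backslash + '"'           -> '"'
//   backslash + 'u' + "0041"  -> 'A'
//   backslash + backslash     -> single backslash (replaced last)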
public Administration(ITokenProcessor processor)
{
    m_tokenProcessor = processor;
}
public string TranslateToken(string originalToken, ITokenProcessor tokenProcessor)
{
    return originalToken;
}
public MatchProcessor(ITokenProcessor[] matchers)
{
    _tokenProcessors = matchers;
}
public MatchProcessor(ITokenMatcher matcher)
{
    _processor = matcher.CreateTokenProcessor();
}
public string TranslateToken(string originalToken, ITokenProcessor tokenProcessor)
{
    return _tokenMatcher.TranslateToken(originalToken, tokenProcessor);
}
public string TranslateToken(string originalToken, ITokenProcessor tokenProcessor)
{
    return originalToken.Substring(((MatchProcessor)tokenProcessor).Skipped);
}
/// <summary>
/// Get list of postings
/// </summary>
/// <param name="index">Inverted index</param>
/// <param name="processor">Normal token processor</param>
/// <returns>Postings for all terms matching the wildcard</returns>
public IList<Posting> GetPostings(IIndex index, ITokenProcessor processor)
{
    // Wildcard queries expect normal (non-stemming) token processing
    processor = (NormalTokenProcessor)processor;

    // Process the token normally and split it into literals on '*'
    string[] literals = this.token.Split("*").ToArray();
    for (int i = 0; i < literals.Length; i++)
    {
        List<string> processedToken = processor.ProcessToken(literals[i]);
        if (processedToken.Count > 0)
        {
            if (i == 0)
            {
                literals[i] = "$" + processedToken[0];   // prefix literal
            }
            else if (i == literals.Length - 1)
            {
                literals[i] = processedToken[0] + "$";   // suffix literal
            }
            else
            {
                literals[i] = processedToken[0];         // infix literal
            }
        }
    }
    literals = literals.Where(x => !string.IsNullOrEmpty(x) && x != "$").ToArray();

    // Gather candidates for each literal
    List<List<string>> candidatesList = new List<List<string>>();
    foreach (string literal in literals)
    {
        List<string> candidates = new List<string>();
        bool didMerge = false;

        // k-gram lookup, AND-merging the results for this literal
        List<string> kGramTerms = this.KGramSplitter(literal);
        foreach (string kGramTerm in kGramTerms)
        {
            if (!didMerge)
            {
                candidates = candidates.Union(this.kGram.getVocabularies(kGramTerm)).ToList();
                didMerge = true;
            }
            else
            {
                candidates = candidates.Intersect(this.kGram.getVocabularies(kGramTerm)).ToList();
            }
        }

        // Post-filtering step
        if (candidates.Count > 0)
        {
            // $literal*
            if (literal.ElementAt(0) == '$' && literal.ElementAt(literal.Length - 1) != '$')
            {
                candidates = candidates.Where(s => s.StartsWith(literal.Substring(1))).ToList();
            }
            // *literal$
            else if (literal.ElementAt(0) != '$' && literal.ElementAt(literal.Length - 1) == '$')
            {
                candidates = candidates.Where(s => s.EndsWith(literal.Substring(0, literal.Length - 1))).ToList();
            }
            // *literal*
            else if (literal.ElementAt(0) != '$' && literal.ElementAt(literal.Length - 1) != '$')
            {
                candidates = candidates.Where(s => s.Contains(literal) && !s.StartsWith(literal) && !s.EndsWith(literal)).ToList();
            }

            candidatesList.Add(candidates);
        }
        else
        {
            candidatesList.Add(new List<string>());
        }
    }

    // Generate the final candidates by intersecting the candidates from all literals
    List<string> finalCandidates = new List<string>();
    for (int i = 0; i < candidatesList.Count; i++)
    {
        if (i == 0)
        {
            finalCandidates = finalCandidates.Union(candidatesList[i]).ToList();
        }
        else
        {
            finalCandidates = finalCandidates.Intersect(candidatesList[i]).ToList();
        }
    }

    // Stem the final candidates and remove duplicates
    HashSet<string> stemmedFinalCandidates = new HashSet<string>();
    foreach (string s in finalCandidates)
    {
        stemmedFinalCandidates.Add(stemmer.Stem(s).Value);
    }

    return index.GetPostings(stemmedFinalCandidates.ToList());
}
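// Worked example of the wildcard handling above (illustrative): the query "red*ion" splits on '*'
// into the literals "red" and "ion", which become "$red" (prefix-anchored) and "ion$" (suffix-anchored).
// Each literal is broken into k-grams via KGramSplitter, the k-gram vocabularies are AND-merged per
// literal and post-filtered against the literal, the per-literal candidate sets are intersected, and
// the surviving terms are stemmed, de-duplicated, and looked up in the index.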
string ITokenMatcher.TranslateToken(string originalToken, ITokenProcessor tokenProcessor)
{
    return TranslateToken(originalToken, (CompositeTokenProcessor)tokenProcessor);
}
string ITokenMatcher.TranslateToken(string originalToken, ITokenProcessor tokenProcessor)
{
    return TranslateToken(originalToken, (WrappedExpressionMatcher)tokenProcessor);
}
public string TranslateToken(string originalToken, ITokenProcessor tokenProcessor)
{
    return _matcher.TranslateToken(originalToken, tokenProcessor);
}
public TokenMatcher(ITokenMatcher tokenMatcher)
{
    _tokenProcessor = tokenMatcher.CreateTokenProcessor();
    _tokenMatcher = tokenMatcher;
}
public EmailService(IEmailMessageService emailService, ITokenProcessor tokenProcessor, IEmailTemplateService emailTemplateService)
{
    _emailService = emailService;
    _tokenProcessor = tokenProcessor;
    _emailTemplateService = emailTemplateService;
}
public void Initialize()
{
    _tokenProcessor = Resolve<ITokenProcessor>();
}
public CompositeTokenProcessor(ITokenProcessor[] tokens)
{
    _tokenProcessors = tokens;
    _startIndexes = new int[tokens.Length];
}
public TokenProcessorsUnitTest()
{
    normalProcesser = new NormalTokenProcessor();
    stemmingProcessor = new StemmingTokenProcesor();
}
public AuthController(ITokenProcessor itokenprocessor, IOptions<JWTConfiguration> jwtConfiguration)
{
    this.itokenprocessor = itokenprocessor;
    this.jwtConfiguration = jwtConfiguration.Value;
}
public void ProcessRequest(HttpContext context)
{
    // If the localization scripts are requested
    if (context.Request.QueryString[Utils.STR_URL_JAVA_PARAM] != null)
    {
        context.Response.ContentType = "text/javascript";
        SendLocalizeScript();
        context.Response.End();
        return;
    }

    if (context.Request.Headers[Utils.StrRequestType] != Utils.StrAjaxRequest)
        return;

    // Parse the request string and create our own request instance
    _mRequest = new CMessageRequest(context);
    if (_mRequest.notValid)
    {
        _mResponse = new CMessageResponse(_mRequest.err, CMessageResponse.EMessageResponseType.Error);
    }

    if (context.Session == null)
    {
        _mResponse = new CMessageResponse("Session needs to be activated", CMessageResponse.EMessageResponseType.Error);
    }

    // Take the TokenProcessor and the success URL from the session
    if (_mResponse == null &&
        ((TokenProcessor = (ITokenProcessor)context.Session["TokenProcessor"]) == null ||
         (_successUrl = (string)context.Session["SuccessUrl"]) == null))
    {
        _mResponse = new CMessageResponse("No ITokenProcessor or SuccessUrl", CMessageResponse.EMessageResponseType.Error);
    }

    if (_mResponse == null)
    {
        _mContext = context;
        try
        {
            // Use reflection to invoke the method whose name was passed in the request
            GetType().InvokeMember(_mRequest.act, BindingFlags.InvokeMethod, null, this, new object[] { });
        }
        catch (Exception)
        {
            _mResponse = new CMessageResponse("Method error. Check request", CMessageResponse.EMessageResponseType.Error);
        }
    }

    if (_mResponse == null)
        _mResponse = new CMessageResponse("Not valid request", CMessageResponse.EMessageResponseType.Error);

    context.Response.ContentType = "application/json";
    context.Response.Write(_mResponse.ToJson());
    context.Response.End();
}