/// <summary>
/// Prepares the weighting model from the given space: caches the training-term
/// frequency index and, for <see cref="TFComputation.modifiedTF"/>, precomputes
/// the square root of the total training-corpus frequency.
/// </summary>
/// <param name="_space">Space model providing the training terms.</param>
/// <param name="log">Log builder (not used by this implementation).</param>
public override void PrepareTheModel(SpaceModel _space, ILogBuilder log)
{
    var space = _space;
    TokenDictionary training_terms = space.GetTerms(true, true);
    // NOTE(review): 'labels' is not used below — kept in case ToList() forces
    // an enumeration some caller relies on; confirm before removing.
    List<SpaceLabel> labels = space.labels.ToList();

    shortName = GetFunctionName(computation);

    if (!IsEnabled)
    {
        return;
    }

    switch (computation)
    {
        case TFComputation.modifiedTF:
            // Sqrt of the total token count of the corpus, used as the Tc term
            // in the modified-TF formula applied later in GetElementFactor.
            SqrTc = Math.Sqrt(training_terms.GetSumFrequency());
            break;

        default:
            // Other computations need no precomputed per-document normalization.
            break;
    }

    index = training_terms.ToFrequencyDictionary();
}
/// <summary>
/// Tokenizes <paramref name="operation"/> character by character into the
/// <c>tokens</c> list, merging consecutive digit tokens into one number token.
/// </summary>
/// <param name="operation">Expression string to tokenize.</param>
/// <returns><c>false</c> as soon as an unknown character is encountered; otherwise <c>true</c>.</returns>
private bool CreateListFromString(string operation)
{
    tokens.Clear();

    foreach (char symbol in operation)
    {
        string character = symbol.ToString(CultureInfo.CurrentCulture);

        // Any character missing from the token table aborts the whole parse.
        if (!TokenDictionary.ContainsKey(character))
        {
            return false;
        }

        Token current = new Token(TokenDictionary[character]);
        Token last = tokens.Count > 0 ? tokens[tokens.Count - 1] : null;

        if (last != null && last.TypeToken == TypeToken.Number && current.TypeToken == TypeToken.Number)
        {
            // Adjacent digits form one multi-digit number: extend the previous token.
            last.ExpressionToken += current.ExpressionToken;
            last.ValToken = double.Parse(last.ExpressionToken, CultureInfo.CurrentCulture);
        }
        else
        {
            tokens.Add(current);
        }
    }

    return true;
}
/// <summary>
/// Fills <c>TokenDictionary</c> with the replace value of every known token,
/// guarding against token definitions that execute a query while resolving.
/// </summary>
private void BuildTokenCache()
{
    foreach (TokenDefinition tokenDefinition in _tokens)
    {
        foreach (string token in tokenDefinition.GetTokens())
        {
            var tokenKey = Regex.Unescape(token);
            if (TokenDictionary.ContainsKey(tokenKey))
            {
                continue;
            }

            // Resolving a token must not trigger an ExecuteQuery on the current
            // context; detect it via the pending-request count before/after.
            int pendingBefore = _web.Context.PendingRequestCount();
            string replaceValue = tokenDefinition.GetReplaceValue();
            int pendingAfter = _web.Context.PendingRequestCount();

            if (pendingBefore != pendingAfter)
            {
                throw new Exception($"Token {token} triggered an ExecuteQuery on the 'current' context. Please refactor this token to use the TokenContext class.");
            }

            TokenDictionary[tokenKey] = replaceValue;

            // List tokens are additionally indexed so they can be filtered per web.
            if (tokenDefinition is ListIdToken)
            {
                ListTokenDictionary[tokenKey] = tokenDefinition;
            }
        }
    }
}
/// <summary>
/// Normalizes this definition's name (splitting off an embedded namespace when
/// present) and recursively formats all member, enum and class names.
/// </summary>
public void Format()
{
    var tokenDictionary = new TokenDictionary();

    string name, @namespace;
    bool hasNamespace = GuerillaCs.SplitNamespaceFromFieldName(Value.Name, out name, out @namespace);

    if (hasNamespace)
    {
        Value.Name = tokenDictionary.GenerateValidToken(GuerillaCs.ToTypeName(name));
        Namespace = GuerillaCs.ToTypeName(@namespace);
    }
    else
    {
        Value.Name = tokenDictionary.GenerateValidToken(GuerillaCs.ToTypeName(Value.Name));
    }

    FormatFieldNames(tokenDictionary);

    foreach (var enumDefinition in EnumDefinitions)
    {
        enumDefinition.Format();
    }
    foreach (var classDefinition in ClassDefinitions)
    {
        classDefinition.Format();
    }
}
/// <summary>
/// Generates a C# source file for the given tag layout (plus one file per
/// nested definition discovered while processing it) into <paramref name="folder"/>.
/// </summary>
/// <param name="tag">Tag group whose definition is dumped.</param>
/// <param name="folder">Destination directory for the generated .cs files.</param>
public void DumpTagLayout(MoonfishTagGroup tag, string folder)
{
    _definitionsDictionary.Clear();
    var info = BeginProcessTagBlockDefinition(tag.Definition);

    using (var stream = new FileStream(Path.Combine(folder, info.Name + ".cs"), FileMode.Create,
        FileAccess.Write, FileShare.ReadWrite))
    {
        var size = tag.Definition.CalculateSizeOfFieldSet();
        var hasParent = h2Tags.Any(x => x.Class == tag.ParentClass);
        if (hasParent)
        {
            var parentTag = new MoonfishTagGroup(h2Tags.First(x => x.Class == tag.ParentClass));
            info.BaseClass = new ClassInfo(TokenDictionary.GenerateValidIdentifier(
                ToTypeName(parentTag.Definition.Name)));

            // Walk the inheritance chain, summing the field-set size of every ancestor.
            while (hasParent)
            {
                size += parentTag.Definition.CalculateSizeOfFieldSet();
                hasParent = h2Tags.Any(x => x.Class == parentTag.ParentClass);
                if (hasParent)
                {
                    parentTag = new MoonfishTagGroup(h2Tags.First(x => x.Class == parentTag.ParentClass));
                }
            }
        }
        else
        {
            info.BaseClass = new ClassInfo("GuerillaBlock");
        }

        var alignment = tag.Definition.Alignment;
        var property = info.Properties.Single(x => x.Name == "SerializedSize");
        property.GetBody = string.Format("return {0};", size);
        info.Attributes.Add(new AttributeInfo(typeof(TagClassAttribute))
        {
            Parameters = { "\"" + tag.Class + "\"" }
        });

        // FIX: the StreamWriter was never flushed or disposed, which could leave
        // buffered output unwritten; wrap it in a using block.
        using (var streamWriter = new StreamWriter(stream))
        {
            info.Generate();
            GenerateOutputForClass(info, streamWriter);
        }
    }

    var localDefinitions = _definitionsDictionary.Select(x => x.Value);
    foreach (var item in localDefinitions)
    {
        using (var stream = new FileStream(Path.Combine(folder, item.Name + ".cs"), FileMode.Create,
            FileAccess.Write, FileShare.ReadWrite))
        // FIX: likewise dispose the per-definition writer so its buffer is flushed.
        using (var itemWriter = new StreamWriter(stream))
        {
            item.Generate();
            GenerateOutputForClass(item, itemWriter);
        }
    }
}
/// <summary>
/// Re-issues an authentication token for the user with the given id, replacing
/// <paramref name="currentToken"/> in the token store.
/// </summary>
/// <param name="id">User id.</param>
/// <param name="currentToken">Token being replaced.</param>
/// <returns>A new <see cref="AuthenticateResponse"/>, or null when the user is unknown or token creation fails.</returns>
public AuthenticateResponse Refresh(int id, string currentToken)
{
    var user = _context.Users.SingleOrDefault(x => x.Id == id);
    if (user is null)
    {
        return null;
    }

    // Re-attach the user's module assignments, joined with the module details.
    user.UsersModules = _context.UsersModules
        .Where(x => x.IdUser == user.Id)
        .Join(_context.Modules,
            x => x.IdModule,
            y => y.Id,
            (x, y) => new UsersModules
            {
                Id = x.Id,
                IdUser = user.Id,
                User = user,
                IdModule = y.Id,
                Module = y
            })
        .ToList();

    TokenDictionary.Remove(id, currentToken);

    var token = GenerateToken(user);
    return token is null ? null : new AuthenticateResponse(user, token);
}
/// <summary>
/// A placeholder registered before its target token exists must still resolve
/// once the target is defined.
/// </summary>
public void TestTokenDictionaryBackwardResolution()
{
    var dictionary = new TokenDictionary();

    // Register a token whose value references "name" before "name" is defined.
    dictionary.Set("anotherName", Token.FromString(TokenReplacement.TokenPlaceHolder("name")));
    dictionary.Set("name", Token.FromString("testName"));

    // The placeholder resolves through to the later-defined token.
    Assert.AreEqual(expected: "testName", actual: dictionary.GetString("anotherName"));
}
/// <summary>
/// Configures JWT bearer options: encrypted-token validation against the app's
/// certificate, a JSON 401 challenge response, and a per-user token whitelist
/// check on validation.
/// </summary>
/// <param name="options">Options instance to configure.</param>
/// <param name="settings">Application settings providing issuer, audience and certificate.</param>
/// <returns>The same <paramref name="options"/> instance, configured.</returns>
public static JwtBearerOptions DefaultJwtBearerOptions(this JwtBearerOptions options, AppSettings settings)
{
    options.RequireHttpsMetadata = false;
    options.SaveToken = true;

    var certificate = new X509Certificate2(settings.PrivateKey, settings.PasswordCertificate);
    options.TokenValidationParameters = new TokenValidationParameters
    {
        ValidateIssuer = true,
        ValidIssuer = settings.Issuer,
        ValidateAudience = true,
        ValidAudience = settings.Audience,
        // Tokens are encrypted (TokenDecryptionKey) rather than signed.
        RequireSignedTokens = false,
        ClockSkew = TimeSpan.Zero,
        TokenDecryptionKey = new X509SecurityKey(certificate)
    };

    options.Events = new JwtBearerEvents
    {
        // Replace the default challenge with a JSON body describing the error.
        OnChallenge = ctx =>
        {
            ctx.HandleResponse();
            ctx.Response.ContentType = "application/json";
            ctx.Response.StatusCode = 401;
            var payload = new
            {
                error = ctx.Error,
                description = ctx.ErrorDescription,
                statusCode = ctx.Response.StatusCode
            };
            return ctx.Response.WriteAsync(JsonConvert.SerializeObject(payload));
        },
        // Reject tokens that are not in the user's active-token whitelist.
        OnTokenValidated = ctx =>
        {
            var idClaim = ctx.Principal.Claims.SingleOrDefault(x => x.Type == "id");
            if (idClaim is null)
            {
                return Task.CompletedTask;
            }

            var userId = Convert.ToInt32(idClaim.Value);
            var rawToken = (ctx.SecurityToken as JwtSecurityToken).RawData;

            var knownTokens = TokenDictionary.GetTokens(userId);
            if (knownTokens is null)
            {
                return Task.CompletedTask;
            }

            if (!knownTokens.Any(x => x.Value.Equals(rawToken, StringComparison.OrdinalIgnoreCase)))
            {
                ctx.Fail("Token invalid");
            }
            return Task.CompletedTask;
        }
    };

    return options;
}
}
/// <summary>
/// Parses given string for a webpart making sure we only parse the token for a given web
/// </summary>
/// <param name="input">input string</param>
/// <param name="web">filters the tokens on web id</param>
/// <param name="tokensToSkip">array of tokens to skip</param>
/// <returns>Returns parsed string for a webpart</returns>
public string ParseStringWebPart(string input, Web web, params string[] tokensToSkip)
{
    web.EnsureProperty(x => x.Id);

    // Fast exit: nothing to do unless the input can contain a token marker.
    var tokenChars = new[] { '{', '~' };
    if (string.IsNullOrEmpty(input) || input.IndexOfAny(tokenChars) == -1)
    {
        return(input);
    }

    BuildTokenCache();

    // Optimize for direct match with string search
    if (TokenDictionary.TryGetValue(input, out string directMatch))
    {
        return(directMatch);
    }

    // Support for non cached tokens
    var nonCachedTokens = BuildNonCachedTokenCache();
    if (nonCachedTokens.TryGetValue(input, out string directMatchNonCached))
    {
        return(directMatchNonCached);
    }

    string output = input;
    bool hasMatch = false;
    // Repeat until a fixpoint: replacements may themselves contain tokens.
    do
    {
        hasMatch = false;
        output = ReToken.Replace(output, match =>
        {
            string tokenString = match.Groups[0].Value;
            if (TokenDictionary.TryGetValue(tokenString, out string val))
            {
                // List tokens are only substituted when they belong to this web.
                if (tokenString.IndexOf("listid", StringComparison.OrdinalIgnoreCase) != -1)
                {
                    var token = ListTokenDictionary[tokenString];
                    if (!token.Web.Id.Equals(web.Id))
                    {
                        // Leave the token text untouched for other webs.
                        return(tokenString);
                    }
                }
                hasMatch = true;
                return(val);
            }
            // Unknown token: keep the original text.
            return(match.Groups[0].Value);
        });
    } while (hasMatch && input != output);
    return(output);
}
/// <summary>
/// Removes <paramref name="currentToken"/> from the token store for the given user.
/// </summary>
/// <param name="id">User id.</param>
/// <param name="currentToken">Token to revoke.</param>
/// <returns>false when the user does not exist; otherwise true.</returns>
public bool Revoke(int id, string currentToken)
{
    var user = _context.Users.SingleOrDefault(x => x.Id == id);
    if (user is null)
    {
        return false;
    }

    TokenDictionary.Remove(id, currentToken);
    return true;
}
/// <summary>
/// Loads the default flow-config document, optionally resolving tokens in it.
/// </summary>
/// <param name="tokens">Optional token dictionary used to resolve placeholders.</param>
public async Task<FlowConfig> GetDefaultConfig(TokenDictionary tokens = null)
{
    var config = await CommonsData.GetByName(CommonDataName_DefaultFlowConfig);
    return FlowConfig.From(tokens == null ? config : tokens.Resolve(config));
}
/// <summary>
/// Loads the flow-config document for the given input type, optionally
/// resolving tokens in it.
/// </summary>
/// <param name="inputType">Input type whose config name is looked up.</param>
/// <param name="tokens">Optional token dictionary used to resolve placeholders.</param>
public async Task<FlowConfig> GetFlowConfigByInputType(string inputType, TokenDictionary tokens = null)
{
    var configName = this.GetFlowConfigName(inputType);
    var config = await CommonsData.GetByName(configName);
    return FlowConfig.From(tokens == null ? config : tokens.Resolve(config));
}
/// <summary>
/// Converts a plain string dictionary into a <see cref="TokenDictionary"/>.
/// </summary>
/// <param name="origin">Source dictionary; may be null.</param>
/// <returns>null when <paramref name="origin"/> is null; otherwise the converted dictionary.</returns>
public static TokenDictionary ToTokens(this IDictionary<string, string> origin)
{
    if (origin is null)
    {
        return null;
    }

    var tokens = new TokenDictionary();
    tokens.AddBatch(origin.Select(kv => KeyValuePair.Create(kv.Key, Token.FromString(kv.Value))));
    return tokens;
}
/// <summary>
/// Loads the configured test file into a TokenDictionary and logs whether the
/// JSON import succeeded.
/// </summary>
public void ImportJSON()
{
    Debug.Log("Trying to import " + fileToTest.name);

    var dictionary = new TokenDictionary
    {
        fileContents = fileToTest.text,
        filePath = fileToTest.name
    };

    if (dictionary.ImportJSON())
    {
        Debug.Log("Imported successfully.");
    }
}
/// <summary>
/// Creates a token whose data is the encoded form of <paramref name="str"/>,
/// with a countdown timer that removes it from <c>TokenDictionary</c> when its
/// time-to-live reaches zero.
/// </summary>
/// <param name="str">Raw value to encode.</param>
/// <param name="encode_type">Encoding algorithm name passed to Security.Encode.</param>
public Token(string str, string encode_type = "MD5")
{
    Data = Security.Encode(str, encode_type);
    TimeLife = DEFAULT_TIME_LIFE;

    // FIX: the Timer was created without keeping a reference, so it was eligible
    // for garbage collection (silently stopping the countdown) and was never
    // disposed, leaving it firing forever after the token expired. Capture it
    // in the callback and dispose it once expiry has been handled.
    Timer expiryTimer = null;
    expiryTimer = new Timer((s) =>
    {
        TimeLife -= 1;
        if (TimeLife <= 0)
        {
            if (TokenDictionary.ContainsKey(this))
            {
                TokenDictionary.Remove(this);
            }
            expiryTimer?.Dispose();
        }
    }, null, 0, DEFAULT_PERIOD_DECREASE_TIME_LIFE);
}
/// <summary>
/// Creates an empty testing dictionary and captures the lexer's pre-populated
/// one, asserting the latter actually contains entries.
/// </summary>
public void SetUp()
{
    _emptyDictionary = new TestingTokenDictionary();

    var lexer = new Lexer();
    _filledDictionary = lexer.Dictionary;

    Assert.That(_filledDictionary, Is.Not.Null);
    Assert.That(_filledDictionary.Count, Is.GreaterThan(0));
}
/// <summary>
/// Collects the model's terms into a single dictionary.
/// </summary>
/// <param name="model">The model.</param>
/// <param name="labeled">Include terms with a known label.</param>
/// <param name="unlabeled">Include terms with an unknown label.</param>
/// <returns>A merged <see cref="TokenDictionary"/> (empty when both flags are false).</returns>
public static TokenDictionary GetTerms(this SpaceModel model, Boolean labeled, Boolean unlabeled)
{
    var output = new TokenDictionary();

    if (labeled)
    {
        output.MergeDictionary(model.terms_known_label);
    }
    if (unlabeled)
    {
        output.MergeDictionary(model.terms_unknown_label);
    }

    return output;
}
/// <summary>
/// Acquires an authentication token, optionally serving it from (and storing it
/// into) the per-AppDomain token cache keyed by <paramref name="tokenId"/>.
/// </summary>
/// <param name="method">HTTP method of the token request.</param>
/// <param name="url">Token endpoint URL.</param>
/// <param name="headers">Request headers.</param>
/// <param name="contentType">Content type; required when <paramref name="body"/> is set.</param>
/// <param name="body">Optional request body.</param>
/// <param name="tokenPath">Path used to extract the token from the response; required.</param>
/// <param name="tokenId">Cache key; must be non-empty when caching is enabled.</param>
/// <param name="tokenExpiresIn">Cache lifetime in seconds; must be positive when caching is enabled.</param>
/// <param name="cachedToken">Whether to use the token cache at all.</param>
/// <returns>The acquired (or cached) token.</returns>
public static string GetToken(HttpMethodEnum method, string url, HeaderCollection headers, string contentType,
    string body, string tokenPath, Guid tokenId, int tokenExpiresIn, bool cachedToken)
{
    // FIX: ArgumentNullException(paramName, message) had its arguments reversed,
    // and '&' was used instead of the short-circuiting '&&'.
    if (!string.IsNullOrEmpty(body) && string.IsNullOrEmpty(contentType))
    {
        throw new ArgumentNullException("contentType", "ContentType cannot be null or empty when Body is specified");
    }
    if (string.IsNullOrEmpty(tokenPath))
    {
        throw new ArgumentNullException("tokenPath", "TokenPath cannot be null");
    }

    string token = null;
    if (cachedToken)
    {
        if (tokenExpiresIn <= 0)
        {
            throw new ArgumentException("tokenExpiresIn must have a value greater than zero.", "tokenExpiresIn");
        }
        // FIX: Guid is a value type, so 'tokenId == null' was always false;
        // only the Guid.Empty check is meaningful.
        if (tokenId == Guid.Empty)
        {
            throw new ArgumentException("Invalid tokenId", "tokenId");
        }

        TokenInfo ti = AppDomainHelper.TokenDictionary.GetOrCreateTokenInfo(tokenId, tokenExpiresIn);
        // Serialize refreshes of the same cached token.
        lock (ti)
        {
            // FIX: '!cachedToken' was always false inside this branch (dead
            // condition) and '|' did not short-circuit; simplified to '||'.
            if (ti.IsNew || !ti.IsValid)
            {
                token = GetToken(method, url, headers, contentType, body, tokenPath);
                ti.SetTokenInfo(token, DateTime.Now);
                TokenDictionary.WriteLogMessage(message: $"Get new token for TokenId '{tokenId}'.", procName: "Cached Token");
            }
            else
            {
                token = ti.Token;
                TokenDictionary.WriteLogMessage(message: $"Get token for TokenId '{tokenId}' from cache.", procName: "Cached Token");
            }
        }
    }
    else
    {
        token = GetToken(method, url, headers, contentType, body, tokenPath);
        TokenDictionary.WriteLogMessage(message: $"Get new token", procName: "No Cache");
    }

    return token;
}
/// <summary>
/// Populates <c>TokenDictionary</c> with the replace value for every token that
/// is not already cached.
/// </summary>
private void BuildTokenCache()
{
    foreach (var tokenDefinition in _tokens)
    {
        foreach (string token in tokenDefinition.GetTokens())
        {
            var key = Regex.Unescape(token);

            // First definition of a token wins; later duplicates are ignored.
            if (!TokenDictionary.ContainsKey(key))
            {
                TokenDictionary[key] = tokenDefinition.GetReplaceValue();
            }
        }
    }
}
/// <summary>
/// Initializes the code-generation scaffolding: reserves the class name in the
/// token dictionary and builds the compile unit, namespace and type declaration.
/// </summary>
/// <param name="className">Name of the generated class.</param>
protected GuerillaBlockClassBase(string className)
{
    TokenDictionary = new TokenDictionary();
    TokenDictionary.Add(className);

    TargetUnit = new CodeCompileUnit();

    var tagsCodeNamespace = new CodeNamespace("Moonfish.Guerilla.Tags");
    // Standard imports every generated tag class relies on.
    foreach (var import in new[]
    {
        "Moonfish.Tags",
        "Moonfish.Model",
        "System.IO",
        "System.Collections.Generic",
        "System.Linq"
    })
    {
        tagsCodeNamespace.Imports.Add(new CodeNamespaceImport(import));
    }

    TargetClass = new CodeTypeDeclaration(className)
    {
        TypeAttributes = TypeAttributes.Public
    };
    tagsCodeNamespace.Types.Add(TargetClass);
    TargetUnit.Namespaces.Add(tagsCodeNamespace);
}
/// <summary>
/// Updates Score in array and return it
/// </summary>
/// <param name="startIndex">Start index of term to calculate split set score</param>
/// <param name="endIndex">End index of term to calculate split set score</param>
/// <returns>Split Set Score</returns>
private SplitSetScore PopulateBestSplitSetScore(int startIndex, int endIndex)
{
    // Dynamic programming, filled right-to-left: the best score for the suffix
    // starting at 'startPosition' builds on scores of later positions that were
    // already written into _splitSetScores.
    for (int startPosition = endIndex; startPosition >= startIndex; startPosition--)
    {
        // Split end positions the dictionary recognizes for this start position.
        List <SplitPositionWithIdentification> possibleIndexes = TokenDictionary.GetPossibleEndIndexesList(_term, startPosition, endIndex);
        SplitSetScore bestSplitSetScore = null;
        for (int currentSplitPosition = startPosition; currentSplitPosition <= endIndex; currentSplitPosition++)
        {
            // Positions the dictionary did not suggest are still candidates,
            // just marked Unidentified.
            SplitPositionWithIdentification splitPositionWithIdentification =
                possibleIndexes.FirstOrDefault(x => x.Position == currentSplitPosition)
                ?? new SplitPositionWithIdentification(currentSplitPosition, SplitIdentification.Unidentified);
            // Score = this split combined with the best score of the remainder
            // of the term (everything after currentSplitPosition).
            SplitSetScore splitSetScore = new SplitSetScore(splitPositionWithIdentification, GetBestSplitSetScore(currentSplitPosition + 1), startPosition);
            if (splitSetScore.IsBetterThan(bestSplitSetScore))
            {
                bestSplitSetScore = splitSetScore;
            }
        }
        _splitSetScores[startPosition] = bestSplitSetScore;
    }
    return(_splitSetScores[startIndex]);
}
/// <summary>
/// Returns the TF-normalization divisor for the given document dictionary,
/// according to the configured <c>normalization</c>.
/// </summary>
/// <param name="document">Term dictionary of a single document.</param>
/// <returns>The divisor; 1 for an empty document (avoids division by zero).</returns>
private Double GetDivisor(TokenDictionary document)
{
    if (document.Count == 0)
    {
        return 1;
    }

    // FIX: removed the unreachable 'break' statements that followed each
    // 'return' (dead code, compiler warning CS0162).
    switch (normalization)
    {
        case TFNormalization.squareRootOfSquareSum:
            return document.GetSquareRootOfSumSquareFrequencies();

        case TFNormalization.divisionByMaxTF:
        default:
            return document.GetMaxFrequency();
    }
}
/// <summary>
/// Makes ranked table with term frequencies
/// </summary>
/// <param name="terms">The terms.</param>
/// <param name="name">The name.</param>
/// <param name="description">The description.</param>
/// <param name="limit">Maximum number of ranked entries included in the table.</param>
/// <returns>A <see cref="DataTable"/> with rank, token id, token and frequency columns.</returns>
public static DataTable MakeTable(this TokenDictionary terms, string name, string description, Int32 limit = 1000)
{
    DataTable table = new DataTable();
    table.SetTitle(name);
    table.SetDescription(description);

    // FIX: corrected the "Dictinct" typo in the emitted info label
    // (the sibling MakeRankedList already spells it "Distinct").
    table.SetAdditionalInfoEntry("Distinct terms", terms.Count, "Total distinct terms in the dictionary");
    table.SetAdditionalInfoEntry("Max frequency", terms.GetMaxFrequency(), "Highest frequency");
    table.SetAdditionalInfoEntry("Total tokens", terms.GetSumFrequency(), "Total number of tokens extracted from the corpus/document, i.e. sum of all frequencies");

    DataColumn column_rank = table.Add("Rank", "Rank by frequency", "R", typeof(Int32), imbSCI.Core.enums.dataPointImportance.normal).SetWidth(20);
    DataColumn column_id = table.Add("ID", "Token ID", "id", typeof(Int32), imbSCI.Core.enums.dataPointImportance.normal).SetWidth(20);
    DataColumn column_token = table.Add("Token", "Token", "t", typeof(String), imbSCI.Core.enums.dataPointImportance.normal).SetWidth(50);
    DataColumn column_freq = table.Add("Frequency", "Absolute number of token occurrences in the corpus/document", "TF", typeof(Int32), imbSCI.Core.enums.dataPointImportance.normal).SetWidth(30);

    var list = terms.GetRankedTokenFrequency(limit);

    // Fill one row per ranked token, most frequent first.
    Int32 c = 1;
    foreach (var pair in list)
    {
        var dr = table.NewRow();
        dr[column_rank] = c;
        dr[column_id] = terms.GetTokenID(pair.Key);
        dr[column_token] = pair.Key;
        dr[column_freq] = pair.Value;
        c++;
        table.Rows.Add(dr);
    }

    if (terms.Count > limit)
    {
        table.AddExtra("Table contains only top [" + limit + "] entries, out of [" + terms.Count + "] enumerated in the dictionary");
    }

    return table;
}
/// <summary>
/// Renders the ranked term-frequency list as plain text and optionally writes
/// it to <paramref name="filepath"/>.
/// </summary>
/// <param name="terms">Dictionary to report on.</param>
/// <param name="name">Report name.</param>
/// <param name="description">Report description.</param>
/// <param name="limit">Maximum number of entries shown; 0 or less means no limit.</param>
/// <param name="filepath">Optional output file path.</param>
/// <returns>The rendered report text.</returns>
public static String MakeRankedList(this TokenDictionary terms, string name, string description, Int32 limit = 1000, String filepath = "")
{
    var sb = new StringBuilder();
    var ranked = terms.GetRankedTokenFrequency(limit);

    sb.AppendLine("Name: " + name);
    sb.AppendLine("Description: " + description);
    sb.AppendLine("Distinct terms: " + terms.Count);
    if (limit > 0)
    {
        sb.AppendLine("Showing top: " + limit);
    }
    sb.AppendLine("# \t\t ID \t\t KEY \t\t TKN \t\t\t FREQ");

    Int32 rank = 1;
    foreach (var pair in ranked)
    {
        sb.AppendLine(rank.ToString() + "\t\t" + terms.GetTokenID(pair.Key) + "\t\t" + pair.Key + "\t\t\t" + pair.Value);
        rank++;
        if (limit > 0 && rank > limit)
        {
            break;
        }
    }

    if (!filepath.isNullOrEmpty())
    {
        File.WriteAllText(filepath, sb.ToString());
    }

    return sb.ToString();
}
/// <summary>
/// Produces a valid, unique name from the candidate strings: the first
/// candidate with no naming conflict wins; otherwise the first conflicting
/// candidate (or a fallback) is disambiguated via the token dictionary.
/// </summary>
/// <param name="attributes">Member attributes controlling case formatting.</param>
/// <param name="tokenDictionary">Dictionary to check against; falls back to the instance one when null.</param>
/// <param name="takeFirstMatch">When true, return the first conflicting candidate immediately (disambiguated).</param>
/// <param name="nameStrings">Candidate names, in priority order.</param>
protected string GenerateName(MemberAttributes attributes, TokenDictionary tokenDictionary, bool takeFirstMatch, params string[] nameStrings)
{
    tokenDictionary = tokenDictionary ?? TokenDictionary;

    string firstConflict = null;
    foreach (var candidate in nameStrings)
    {
        var token = ConvertCaseFormating(attributes, candidate).ToAlphaNumericToken();
        if (string.IsNullOrWhiteSpace(token))
        {
            continue;
        }

        // Unused and non-empty: this candidate is taken as-is.
        if (!tokenDictionary.Contains(token))
        {
            return tokenDictionary.GenerateValidToken(token);
        }

        // Conflicting candidate: either take it immediately, or remember only
        // the first one seen for later disambiguation.
        if (firstConflict == null && tokenDictionary.Contains(token))
        {
            if (takeFirstMatch)
            {
                return tokenDictionary.GenerateValidToken(token);
            }
            firstConflict = token;
        }
    }

    return tokenDictionary.GenerateValidToken(
        !string.IsNullOrWhiteSpace(firstConflict)
            ? firstConflict
            : ConvertCaseFormating(attributes, "_invalid Name_"));
}
/// <summary>
/// Builds an encrypted JWT for the user (20-minute lifetime), registers it in
/// the token store, and returns its description.
/// </summary>
/// <param name="user">User whose id, username and modules become claims.</param>
/// <returns>The created token description, or null when creation fails.</returns>
private TokenDescription GenerateToken(User user)
{
    var handler = new JwtSecurityTokenHandler();
    var issuedAt = DateTime.UtcNow;
    var expiresAt = issuedAt.AddMinutes(20);

    // One claim per assigned module, plus the user's id and username.
    var claims = new List<Claim>
    {
        new Claim("id", user.Id.ToString()),
        new Claim("username", user.Username)
    };
    foreach (var module in user.UsersModules)
    {
        claims.Add(new Claim("module", module.Module.Module));
    }

    try
    {
        var descriptor = new SecurityTokenDescriptor
        {
            Subject = new ClaimsIdentity(claims),
            Expires = expiresAt,
            Audience = _appSettings.Audience,
            Issuer = _appSettings.Issuer,
            EncryptingCredentials = new X509EncryptingCredentials(new X509Certificate2(_appSettings.PublicKey)),
        };

        var token = new TokenDescription
        {
            Value = handler.CreateEncodedJwt(descriptor),
            Now = issuedAt,
            Expired = expiresAt
        };

        TokenDictionary.Add(user.Id, token);
        return token;
    }
    catch (System.Exception)
    {
        // Token creation is best-effort; callers treat null as failure.
        return null;
    }
}
/// <summary>
/// Rewrites field, method and enum-definition names so each becomes a unique,
/// valid token via <paramref name="tokenDictionary"/>.
/// </summary>
/// <param name="tokenDictionary">Dictionary tracking already-issued tokens.</param>
void FormatFieldNames(TokenDictionary tokenDictionary)
{
    // FIX: removed the enclosing CSharpCodeProvider — the 'code' variable was
    // created and disposed but never referenced, a pointless allocation.
    foreach (var item in Fields)
    {
        item.Value.Name = tokenDictionary.GenerateValidToken(GuerillaCs.ToMemberName(item.Value.Name));
    }
    foreach (var item in Methods)
    {
        item.ClassName = tokenDictionary.GenerateValidToken(GuerillaCs.ToMemberName(item.ClassName));
    }
    foreach (var item in EnumDefinitions)
    {
        item.Value.Name = tokenDictionary.GenerateValidToken(GuerillaCs.ToTypeName(item.Value.Name));
    }
}
/// <summary>
/// Gets the terms.
/// </summary>
/// <param name="includingSelf">if set to <c>true</c> [including self].</param>
/// <param name="includingChildren">if set to <c>true</c> [including children].</param>
/// <param name="PassSelfIfNotEmpty">When true and this node already has terms, returns them directly.</param>
/// <param name="SetToSelfIfEmpty">When true, caches the merged result back onto this node if it changed or was empty.</param>
/// <returns>The merged term dictionary.</returns>
public TokenDictionary GetTerms(Boolean includingSelf, Boolean includingChildren, Boolean PassSelfIfNotEmpty = true, Boolean SetToSelfIfEmpty = true)
{
    // Short-circuit: reuse the existing dictionary when allowed and non-empty.
    if (PassSelfIfNotEmpty && terms.Count > 0)
    {
        return terms;
    }

    var output = new TokenDictionary();

    if (includingSelf)
    {
        output.MergeDictionary(terms);
    }

    if (includingChildren)
    {
        foreach (SpaceDocumentModel leaf in GetLeafs())
        {
            output.MergeDictionary(leaf.terms);
        }
    }

    // Cache the merged result on this node when the current terms changed or were empty.
    if (SetToSelfIfEmpty && (terms.HasChanges || terms.Count == 0))
    {
        terms = output;
    }

    return output;
}
/// <summary>
/// Computes the term-frequency factor for a term within the given document,
/// according to the configured <c>computation</c> and normalization.
/// </summary>
/// <param name="term">Term to weight.</param>
/// <param name="document">Document providing the term statistics.</param>
/// <returns>
/// The TF factor; 1 when the function is disabled, 0 for terms unknown to the
/// training index under <see cref="TFComputation.modifiedTF"/>.
/// </returns>
public override double GetElementFactor(string term, SpaceDocumentModel document)
{
    if (!IsEnabled)
    {
        return 1;
    }

    TokenDictionary docDict = document.GetTerms(true, true);
    Double TF = docDict.GetTokenFrequency(term);

    // FIX: removed unreachable 'break' statements after each 'return'
    // (dead code, CS0162) and the stale commented-out TFN_index block.
    if (computation == TFComputation.modifiedTF)
    {
        if (!index.ContainsKey(term))
        {
            return 0;
        }

        Double Tt = index[term];          // term frequency in the training corpus
        Double length_d = docDict.Count;  // distinct terms in this document

        Double mTF_above = TF * Math.Log(SqrTc / Tt);
        Double mTF_below_2nd = (length_d * length_d) / SqrTc;
        Double mTF_below = Math.Log(docDict.GetSumSquareFrequencies() * mTF_below_2nd);
        return mTF_above / mTF_below;
    }

    Double divisor = GetDivisor(docDict);

    switch (computation)
    {
        case TFComputation.squareRooted:
            return Math.Sqrt(TF / divisor);

        case TFComputation.glasgow:
            return Math.Log(TF + 1) / divisor;

        case TFComputation.normal:
        default:
            return TF / divisor;
    }
}
/// <summary>
/// Shortcut overload: loads the flow config for the EventHub input type.
/// </summary>
/// <param name="tokens">Optional token dictionary used to resolve placeholders.</param>
public async Task<FlowConfig> GetDefaultConfig(TokenDictionary tokens = null)
{
    return await GetFlowConfigByInputType(Constants.InputType_EventHub, tokens);
}