// Invalidates the client's authentication token by removing the credential
// keyed by the token's selector from the file-backed credential cache.
private void InvalidateAuthenticationToken(HttpRequestMessage request)
{
    // Pull the authentication token supplied by the client's cookie
    string authenticationToken = GetAuthenticationTokenFromCookie(request, AuthenticationToken);

    if ((object)authenticationToken == null)
        return;

    // Only the selector portion (before the ':') keys the cache entry
    string selector = authenticationToken.Split(':')[0];

    // Resolve the credential cache location from system settings
    CategorizedSettingsElementCollection systemSettings = ConfigurationFile.Current.Settings["systemSettings"];
    string credentialCachePath = Path.Combine(systemSettings["ConfigurationCachePath"].Value, "CredentialCache.bin");

    // Remove the entry under the shared cache lock so concurrent
    // readers/writers of the cache file do not collide
    lock (s_credentialCacheLock)
    {
        using (FileBackedDictionary<string, Credential> credentialCache = new FileBackedDictionary<string, Credential>(credentialCachePath))
            credentialCache.Remove(selector);
    }
}
/// <summary>
/// Verifies that an added key/value pair is reported present and counted.
/// </summary>
public void AddTest()
{
    using (FileBackedDictionary<int, int> dictionary = new FileBackedDictionary<int, int>())
    {
        dictionary.Add(0, 0);

        Assert.IsTrue(dictionary.ContainsKey(0));

        // Assert.AreEqual takes (expected, actual); the original had them
        // reversed, producing misleading failure messages.
        Assert.AreEqual(1, dictionary.Count);
    }
}
/// <summary>
/// Verifies that TryGetValue finds an added key and yields its value.
/// </summary>
public void TryGetValueTest()
{
    using (FileBackedDictionary<int, int> dictionary = new FileBackedDictionary<int, int>())
    {
        dictionary.Add(0, 0);

        Assert.IsTrue(dictionary.TryGetValue(0, out int value));

        // Assert.AreEqual takes (expected, actual); the original had them reversed.
        Assert.AreEqual(0, value);
    }
}
/// <summary>
/// Verifies that an added key/value pair is reported present and counted.
/// </summary>
public void AddTest()
{
    using (FileBackedDictionary<int, int> dictionary = new FileBackedDictionary<int, int>())
    {
        dictionary.Add(0, 0);

        Assert.IsTrue(dictionary.ContainsKey(0));

        // Assert.AreEqual takes (expected, actual); the original had them
        // reversed, producing misleading failure messages.
        Assert.AreEqual(1, dictionary.Count);
    }
}
/// <summary>
/// Verifies that TryGetValue finds an added key and yields its value.
/// </summary>
public void TryGetValueTest()
{
    int value;

    using (FileBackedDictionary<int, int> dictionary = new FileBackedDictionary<int, int>())
    {
        dictionary.Add(0, 0);

        Assert.IsTrue(dictionary.TryGetValue(0, out value));

        // Assert.AreEqual takes (expected, actual); the original had them reversed.
        Assert.AreEqual(0, value);
    }
}
/// <summary>
/// Verifies that Clear removes every entry from a populated dictionary.
/// </summary>
public void ClearTest()
{
    using (FileBackedDictionary<int, int> dictionary = new FileBackedDictionary<int, int>())
    {
        for (int i = 0; i < 100; i++)
            dictionary.Add(i, i);

        // Assert.AreEqual takes (expected, actual); the original had them reversed.
        Assert.AreEqual(100, dictionary.Count);

        dictionary.Clear();

        Assert.AreEqual(0, dictionary.Count);
    }
}
// Issues a new "selector:validator" authentication token for the given user
// and caches the credentials, keyed by selector, for later validation.
private string IssueAuthenticationToken(string username, string password)
{
    byte[] randomBytes = new byte[9];

    // The selector locates the cache entry; the validator proves ownership
    Random.GetBytes(randomBytes);
    string selector = Convert.ToBase64String(randomBytes);

    Random.GetBytes(randomBytes);
    string validator = Convert.ToBase64String(randomBytes);

    // Resolve the credential cache location from system settings
    CategorizedSettingsElementCollection systemSettings = ConfigurationFile.Current.Settings["systemSettings"];
    string credentialCachePath = Path.Combine(systemSettings["ConfigurationCachePath"].Value, "CredentialCache.bin");

    // Update the cache under the shared lock
    lock (s_credentialCacheLock)
    {
        using (FileBackedDictionary<string, Credential> credentialCache = new FileBackedDictionary<string, Credential>(credentialCachePath))
        {
            // Purge expired credentials before issuing the new one
            DateTime now = DateTime.UtcNow;

            List<string> expiredSelectors = credentialCache
                .Where(kvp => now >= kvp.Value.Expiration)
                .Select(kvp => kvp.Key)
                .ToList();

            foreach (string expiredSelector in expiredSelectors)
                credentialCache.Remove(expiredSelector);

            credentialCache.Compact();

            // Map the new token onto the user's credentials; tokens live for 30 days.
            // NOTE(review): the password is stored as-is in the cache file — confirm
            // this is intended and that the file is access-protected.
            credentialCache[selector] = new Credential
            {
                Validator = validator,
                Username = username,
                Password = password,
                Expiration = DateTime.UtcNow.AddDays(30.0D)
            };
        }
    }

    return $"{selector}:{validator}";
}
/// <summary>
/// Verifies that Clear removes every entry from a populated dictionary.
/// </summary>
public void ClearTest()
{
    using (FileBackedDictionary<int, int> dictionary = new FileBackedDictionary<int, int>())
    {
        for (int i = 0; i < 100; i++)
        {
            dictionary.Add(i, i);
        }

        // Assert.AreEqual takes (expected, actual); the original had them reversed.
        Assert.AreEqual(100, dictionary.Count);

        dictionary.Clear();

        Assert.AreEqual(0, dictionary.Count);
    }
}
/// <summary>
/// Attempts to use the authentication token to retrieve the user's credentials from the credential cache.
/// </summary>
/// <param name="authenticationToken">The token used to retrieve the user's credentials.</param>
/// <param name="username">The username of the user.</param>
/// <param name="password">The user's password.</param>
/// <returns>True if the user's credentials were successfully retrieved; false otherwise.</returns>
public static bool TryGetCachedCredentials(string authenticationToken, out string username, out string password)
{
    // Compares two strings without early exit so the validator check does not
    // leak the length of the matching prefix via timing. The selector/validator
    // token split exists precisely to make this fixed-time comparison possible.
    bool FixedTimeEquals(string left, string right)
    {
        if ((object)left == null || (object)right == null)
            return (object)left == (object)right;

        int result = left.Length ^ right.Length;

        for (int i = 0; i < left.Length && i < right.Length; i++)
            result |= left[i] ^ right[i];

        return result == 0;
    }

    try
    {
        // Parse the selector and validator from the authentication token
        string[] authenticationTokenParts = authenticationToken.Split(':');

        if (authenticationTokenParts.Length != 2)
        {
            username = null;
            password = null;
            return false;
        }

        string selector = authenticationTokenParts[0];
        string validator = authenticationTokenParts[1];

        // Determine where the credential cache is located
        ConfigurationFile configFile = ConfigurationFile.Current;
        CategorizedSettingsElementCollection systemSettings = configFile.Settings["systemSettings"];
        string configurationCachePath = systemSettings["ConfigurationCachePath"].Value;
        string credentialCachePath = Path.Combine(configurationCachePath, "CredentialCache.bin");

        // Read the credential cache to retrieve the user's
        // credentials that were mapped to this authentication token
        lock (s_credentialCacheLock)
        {
            using (FileBackedDictionary<string, Credential> credentialCache = new FileBackedDictionary<string, Credential>(credentialCachePath))
            {
                Credential credential;

                // The validator is compared in fixed time (was a bare == compare,
                // which is vulnerable to timing attacks on secret comparison)
                if (credentialCache.TryGetValue(selector, out credential) && FixedTimeEquals(validator, credential.Validator) && DateTime.UtcNow < credential.Expiration)
                {
                    username = credential.Username;
                    password = credential.Password;
                    return true;
                }
            }
        }
    }
    catch (Exception ex)
    {
        OnProcessException(ex);
    }

    username = null;
    password = null;
    return false;
}
/// <summary>
/// Processes data not yet processed
/// by this SandBox instance.
/// </summary>
private void ProcessLatestDataOperation()
{
    string latestDataFile = FilePath.GetAbsolutePath(@"LatestData.bin");
    int latestFileGroupID;
    FileInfoDataContext fileInfo;
    List<int> newFileGroups;

    if ((object)m_systemSettings == null)
        ReloadSystemSettings();

    using (FileBackedDictionary<string, int> dictionary = new FileBackedDictionary<string, int>(latestDataFile))
    using (DbAdapterContainer dbAdapterContainer = new DbAdapterContainer(m_systemSettings.DbConnectionString, m_systemSettings.DbTimeout))
    {
        fileInfo = dbAdapterContainer.GetAdapter<FileInfoDataContext>();

        do
        {
            dictionary.Compact();

            // "latestFileGroupID" tracks the highest file group already processed
            if (!dictionary.TryGetValue("latestFileGroupID", out latestFileGroupID))
                latestFileGroupID = 0;

            // Order BEFORE taking so the batch is the 100 smallest unprocessed IDs.
            // The original took an arbitrary 100 rows first and then sorted them;
            // since latestFileGroupID advances to the largest processed ID, any
            // unfetched smaller IDs would be skipped permanently.
            newFileGroups = fileInfo.FileGroups
                .Select(fileGroup => fileGroup.ID)
                .Where(id => id > latestFileGroupID)
                .OrderBy(id => id)
                .Take(100)
                .ToList();

            foreach (int fileGroupID in newFileGroups)
            {
                MeterDataProcessor processor = new MeterDataProcessor(LoadSystemSettings());
                processor.ProcessFileGroup(fileGroupID);

                // Persist progress after each group so a restart resumes here
                dictionary["latestFileGroupID"] = fileGroupID;
            }
        }
        while (newFileGroups.Count > 0);
    }
}
// Static initializer: seeds the in-memory data source map from the file-backed
// cache, then kicks off asynchronous metadata refreshes for every site.service
// pair configured in the "eDNAMetaData" setting.
static eDNAGrafanaController()
{
    CategorizedSettingsElementCollection systemSettings = ConfigurationFile.Open("openHistorian.exe.config").Settings["systemSettings"];

    // Skip initialization unless the setting parses to true: '!' lifts over the
    // nullable bool, so a missing/unparsable setting yields null and the ?? makes
    // the condition true (controller disabled).
    if (!systemSettings["eDNAGrafanaControllerEnabled", true]?.Value.ParseBoolean() ?? true)
    {
        return;
    }

    // Load previously cached data sources into the concurrent in-memory map.
    // NOTE(review): the constructor argument "FileBackedDictionary" appears to be
    // a field holding the cache file path — confirm against the class declaration.
    using (FileBackedDictionary<string, eDNADataSource> FileBackedDataSources = new FileBackedDictionary<string, eDNADataSource>(FileBackedDictionary))
    {
        DataSources = new ConcurrentDictionary<string, eDNADataSource>(FileBackedDataSources);
    }

    // "site.service" pairs to load; defaults to all sites and services
    string eDNAMetaData = systemSettings["eDNAMetaData"]?.Value ?? "*.*";
    List<Task> tasks = new List<Task>();

    foreach (string setting in eDNAMetaData.Split(','))
    {
        string site = setting.Split('.')[0].ToUpper();
        string service = setting.Split('.')[1].ToUpper();

        // Register a placeholder data source for any pair not already cached
        if (!DataSources.ContainsKey($"{site}.{service}"))
        {
            DataSources.AddOrUpdate($"{site}.{service}", new eDNADataSource(site, service));
        }

        // Refresh each pair's metadata concurrently
        tasks.Add(Task.Factory.StartNew(() => RefreshMetaData(site, service)));
    }

    // Once all refreshes complete, persist the updated data sources back to the
    // file-backed cache and compact it to reclaim space from replaced entries
    Task.Factory.ContinueWhenAll(tasks.ToArray(), continuationTask =>
    {
        using (FileBackedDictionary<string, eDNADataSource> FileBackedDataSources = new FileBackedDictionary<string, eDNADataSource>(FileBackedDictionary))
        {
            foreach (KeyValuePair<string, eDNADataSource> kvp in DataSources)
            {
                FileBackedDataSources[kvp.Key] = kvp.Value;
            }

            FileBackedDataSources.Compact();
        }
    });
}
/// <summary>
/// Verifies that Compact preserves live entries and removed/absent keys
/// after a mix of adds, updates, and removals.
/// </summary>
public void CompactTest()
{
    using (FileBackedDictionary<int, int> dictionary = new FileBackedDictionary<int, int>())
    {
        // Every multiple of 4 is added; multiples of 400 are then updated,
        // and the remaining multiples of 100 are removed
        for (int i = 0; i < 10000; i += 4)
        {
            dictionary.Add(i, 4);

            if (i % 400 == 0)
            {
                dictionary[i] = 400;
            }
            else if (i % 100 == 0)
            {
                dictionary.Remove(i);
            }
        }

        dictionary.Compact();

        // Assert.AreEqual takes (expected, actual); the original had them reversed.
        for (int i = 0; i < 10000; i++)
        {
            if (i % 400 == 0)
            {
                Assert.AreEqual(400, dictionary[i]);
            }
            else if (i % 100 == 0)
            {
                Assert.IsFalse(dictionary.ContainsKey(i), i.ToString());
            }
            else if (i % 4 == 0)
            {
                Assert.AreEqual(4, dictionary[i]);
            }
            else
            {
                Assert.IsFalse(dictionary.ContainsKey(i), i.ToString());
            }
        }
    }
}
/// <summary>
/// RefreshAllMetaData refreshes the metadata on command.
/// </summary>
public static void RefreshAllMetaData()
{
    List<Task> refreshTasks = new List<Task>();

    // Kick off a concurrent metadata refresh for every registered
    // "site.service" data source key
    foreach (KeyValuePair<string, eDNADataSource> dataSource in DataSources)
    {
        string[] keyParts = dataSource.Key.Split('.');
        string site = keyParts[0].ToUpper();
        string service = keyParts[1].ToUpper();

        refreshTasks.Add(Task.Factory.StartNew(() => RefreshMetaData(site, service)));
    }

    // Once every refresh completes, persist the data sources back to the
    // file-backed cache and compact it to reclaim space
    Task.Factory.ContinueWhenAll(refreshTasks.ToArray(), completedTasks =>
    {
        using (FileBackedDictionary<string, eDNADataSource> cache = new FileBackedDictionary<string, eDNADataSource>(FileBackedDictionary))
        {
            foreach (KeyValuePair<string, eDNADataSource> dataSource in DataSources)
                cache[dataSource.Key] = dataSource.Value;

            cache.Compact();
        }
    });
}
/// <summary>
/// Verifies that CopyTo copies every key/value pair into the target array.
/// </summary>
public void CopyToTest()
{
    KeyValuePair<int, int>[] array;

    using (FileBackedDictionary<int, int> dictionary = new FileBackedDictionary<int, int>())
    {
        for (int i = 1; i <= 100; i++)
        {
            dictionary.Add(i, i);
        }

        // Assert.AreEqual takes (expected, actual); the original had them reversed.
        Assert.AreEqual(100, dictionary.Count);

        array = new KeyValuePair<int, int>[dictionary.Count];
        dictionary.CopyTo(array, 0);

        foreach (KeyValuePair<int, int> kvp in array)
        {
            Assert.IsTrue(dictionary.Contains(kvp), kvp.Key.ToString());
            Assert.AreEqual(kvp.Value, dictionary[kvp.Key]);
        }
    }
}
/// <summary>
/// Processes data not yet processed
/// by this SandBox instance.
/// </summary>
private void ProcessLatestDataOperation()
{
    string latestDataFile = FilePath.GetAbsolutePath(@"LatestData.bin");
    List<int> newFileGroups;

    if ((object)m_systemSettings == null)
        ReloadSystemSettings();

    using (FileBackedDictionary<string, int> dictionary = new FileBackedDictionary<string, int>(latestDataFile))
    using (AdoDataConnection connection = new AdoDataConnection("systemSettings"))
    {
        do
        {
            dictionary.Compact();

            // "latestFileGroupID" tracks the highest file group already processed
            if (!dictionary.TryGetValue("latestFileGroupID", out m_latestFileGroupID))
                m_latestFileGroupID = 0;

            // Order BEFORE taking so the batch is the 100 smallest unprocessed IDs.
            // The original took an arbitrary 100 rows first and then sorted them;
            // since latestFileGroupID advances to the largest processed ID, any
            // unfetched smaller IDs would be skipped permanently.
            newFileGroups = (new TableOperations<FileGroup>(connection)).QueryRecordsWhere("ID > {0}", m_latestFileGroupID)
                .Select(fileGroup => fileGroup.ID)
                .OrderBy(id => id)
                .Take(100)
                .ToList();

            foreach (int fileGroupID in newFileGroups)
            {
                MeterDataProcessor processor = new MeterDataProcessor(s_connectionString);
                processor.ProcessFileGroup(fileGroupID);

                // Persist progress after each group so a restart resumes here
                dictionary["latestFileGroupID"] = fileGroupID;
            }
        }
        while (newFileGroups.Count > 0);
    }
}
/// <summary>
/// Processes data not yet processed
/// by this SandBox instance.
/// </summary>
private void ProcessLatestDataOperation()
{
    string latestDataFile = FilePath.GetAbsolutePath(@"LatestData.bin");
    int latestFileGroupID;
    FileInfoDataContext fileInfo;
    List<int> newFileGroups;

    if ((object)m_systemSettings == null)
        ReloadSystemSettings();

    using (FileBackedDictionary<string, int> dictionary = new FileBackedDictionary<string, int>(latestDataFile))
    using (DbAdapterContainer dbAdapterContainer = new DbAdapterContainer(m_systemSettings.DbConnectionString, m_systemSettings.DbTimeout))
    {
        fileInfo = dbAdapterContainer.GetAdapter<FileInfoDataContext>();

        do
        {
            dictionary.Compact();

            // "latestFileGroupID" tracks the highest file group already processed
            if (!dictionary.TryGetValue("latestFileGroupID", out latestFileGroupID))
                latestFileGroupID = 0;

            // Order BEFORE taking so the batch is the 100 smallest unprocessed IDs.
            // The original took an arbitrary 100 rows first and then sorted them;
            // since latestFileGroupID advances to the largest processed ID, any
            // unfetched smaller IDs would be skipped permanently.
            newFileGroups = fileInfo.FileGroups
                .Select(fileGroup => fileGroup.ID)
                .Where(id => id > latestFileGroupID)
                .OrderBy(id => id)
                .Take(100)
                .ToList();

            foreach (int fileGroupID in newFileGroups)
            {
                MeterDataProcessor processor = new MeterDataProcessor(LoadSystemSettings());
                processor.ProcessFileGroup(fileGroupID);

                // Persist progress after each group so a restart resumes here
                dictionary["latestFileGroupID"] = fileGroupID;
            }
        }
        while (newFileGroups.Count > 0);
    }
}
/// <summary>
/// Verifies that Compact preserves live entries and removed/absent keys
/// after a mix of adds, updates, and removals.
/// </summary>
public void CompactTest()
{
    using (FileBackedDictionary<int, int> dictionary = new FileBackedDictionary<int, int>())
    {
        // Every multiple of 4 is added; multiples of 400 are then updated,
        // and the remaining multiples of 100 are removed
        for (int i = 0; i < 10000; i += 4)
        {
            dictionary.Add(i, 4);

            if (i % 400 == 0)
                dictionary[i] = 400;
            else if (i % 100 == 0)
                dictionary.Remove(i);
        }

        dictionary.Compact();

        // Assert.AreEqual takes (expected, actual); the original had them reversed.
        for (int i = 0; i < 10000; i++)
        {
            if (i % 400 == 0)
                Assert.AreEqual(400, dictionary[i]);
            else if (i % 100 == 0)
                Assert.IsFalse(dictionary.ContainsKey(i), i.ToString());
            else if (i % 4 == 0)
                Assert.AreEqual(4, dictionary[i]);
            else
                Assert.IsFalse(dictionary.ContainsKey(i), i.ToString());
        }
    }
}
// Timer callback: runs the configured Hadoop query over ODBC for each tagged
// GUID, collecting measurements newer than that tag's last-seen timestamp
// (persisted in a file-backed dictionary), then publishes everything gathered.
private void m_timer_Elapsed(object sender, ElapsedEventArgs e)
{
    List<IMeasurement> measurements = new List<IMeasurement>();

    OnStatusMessage(MessageLevel.Info, "Connecting to Hadoop DB for update");

    // Skip this tick entirely if the previous tick is still running
    if (Monitor.TryEnter(m_timer))
    {
        try
        {
            // When a sub-second field is configured, column 2 of the query
            // result supplies additional ticks beyond whole seconds
            bool addTicks = !string.IsNullOrEmpty(SubSecondField);

            // Connect to the database
            using (OdbcConnection connection = new OdbcConnection(HadoopConnectionString))
            {
                m_currNum = 0;
                int nPoints = 0;

                foreach (Guid guid in m_queryParameter.Keys)
                {
                    Ticks newerThan;

                    m_currNum++;
                    nPoints = 0;

                    // Load this tag's last-seen timestamp; fall back to the
                    // configured oldest timestamp on first sight of the tag
                    lock (s_TimeStampUpdateLock)
                    {
                        using (FileBackedDictionary<Guid, Ticks> dictionary = new FileBackedDictionary<Guid, Ticks>(TimeStampUpdatefile))
                        {
                            if (!dictionary.TryGetValue(guid, out newerThan))
                            {
                                newerThan = m_oldestTimestamp;
                            }
                        }
                    }

                    // First query parameter is the cutoff timestamp; the rest
                    // are the tag-specific parameters
                    object[] param = { newerThan.ToString("yyyy-MM-dd hh:mm:ss") };
                    param = param.Concat(m_queryParameter[guid]).ToArray();

                    DataTable table = connection.RetrieveData(string.Format(m_query, param));

                    foreach (DataRow row in table.Rows)
                    {
                        Measurement measurement = new Measurement
                        {
                            Metadata = MeasurementKey.LookUpOrCreate(guid, "").Metadata
                        };

                        measurement.Value = row.AsDouble(0) ?? double.NaN;
                        measurement.Timestamp = DateTime.Parse(row.AsString(1));

                        // This is only down to seconds accuracy so we make sure
                        // we are only keeping the seconds here
                        measurement.Timestamp = measurement.Timestamp - measurement.Timestamp.DistanceBeyondSecond();

                        // '+' binds tighter than '??': the lifted addition yields a
                        // nullable Ticks, and ?? 0 keeps the truncated timestamp
                        // when column 2 is null — TODO confirm intent
                        if (addTicks)
                        {
                            measurement.Timestamp = measurement.Timestamp + row.AsInt64(2) ?? 0;
                        }

                        // Drop rows at or before the cutoff
                        if (measurement.Timestamp <= newerThan)
                        {
                            continue;
                        }

                        measurements.Add(measurement);
                        nPoints++;

                        // Advance the cutoff to the newest timestamp seen
                        // (always true here given the <= check above)
                        if (measurement.Timestamp > newerThan)
                        {
                            newerThan = measurement.Timestamp;
                        }
                    }

                    // Persist this tag's advanced cutoff for the next tick
                    lock (s_TimeStampUpdateLock)
                    {
                        using (FileBackedDictionary<Guid, Ticks> dictionary = new FileBackedDictionary<Guid, Ticks>(TimeStampUpdatefile))
                        {
                            if (dictionary.Keys.Contains(guid))
                            {
                                dictionary[guid] = newerThan;
                            }
                            else
                            {
                                dictionary.Add(guid, newerThan);
                            }
                        }
                    }

                    m_lastConnected = DateTime.UtcNow;

                    // Progress report every 20 tags
                    if (m_currNum % 20 == 0)
                    {
                        OnStatusMessage(MessageLevel.Info, $"Got Measurements for {m_currNum} out of {m_nTags} Tags");
                        OnStatusMessage(MessageLevel.Info, $"Obtained {nPoints} Points For Tag {guid} up to {newerThan:dd/MM/yyyy hh:mm:ss}");
                    }
                }
            }
        }
        catch (InvalidOperationException ex)
        {
            // Pooled timer thread executed after last read, verify timer has stopped
            m_timer.Enabled = false;
            OnProcessException(MessageLevel.Warning, ex);
        }
        catch (Exception ex)
        {
            OnProcessException(MessageLevel.Error, ex);
        }
        finally
        {
            Monitor.Exit(m_timer);
        }
    }

    // Publish all measurements for this time interval
    m_num = measurements.Count;

    OnStatusMessage(MessageLevel.Info, $"Disconnected from Hadoop with a total of {m_num} Points");

    if (measurements.Count > 0)
    {
        OnNewMeasurements(measurements);
    }
}
/// <summary>
/// Verifies that CopyTo copies every key/value pair into the target array.
/// </summary>
public void CopyToTest()
{
    KeyValuePair<int, int>[] array;

    using (FileBackedDictionary<int, int> dictionary = new FileBackedDictionary<int, int>())
    {
        for (int i = 1; i <= 100; i++)
            dictionary.Add(i, i);

        // Assert.AreEqual takes (expected, actual); the original had them reversed.
        Assert.AreEqual(100, dictionary.Count);

        array = new KeyValuePair<int, int>[dictionary.Count];
        dictionary.CopyTo(array, 0);

        foreach (KeyValuePair<int, int> kvp in array)
        {
            Assert.IsTrue(dictionary.Contains(kvp), kvp.Key.ToString());
            Assert.AreEqual(kvp.Value, dictionary[kvp.Key]);
        }
    }
}