/// <summary>
/// Serializes the specified cache data to the given file, creating the cache directory first if needed.
/// </summary>
/// <param name="file">The destination file; any existing content is overwritten.</param>
/// <param name="cacheData">The data to serialize.</param>
private void WriteCacheData(FileInfo file, DataLogCacheData cacheData)
{
    // Ensure the cache directory exists - Create() is a no-op when it already does.
    _cacheLocation.Create();

    // Using a DataContractSerializer instead of HP.ScalableTest.Framework.Serializer
    // because it produces about 50% less data and will keep the cache smaller.
    using (FileStream output = file.Create())
    {
        _serializer.WriteObject(output, cacheData);
    }
}
/// <summary>
/// Adds the specified log table data to the cache.
/// </summary>
/// <param name="table">The <see cref="LogTableDefinition" />.</param>
/// <param name="record">The <see cref="LogTableRecord" />.</param>
/// <param name="isInsert">if set to <c>true</c> this data should be processed as an insert.</param>
/// <exception cref="ArgumentNullException">
/// <paramref name="table" /> is null.
/// <para>or</para>
/// <paramref name="record" /> is null.
/// </exception>
public void Add(LogTableDefinition table, LogTableRecord record, bool isInsert)
{
    if (table == null)
    {
        throw new ArgumentNullException(nameof(table));
    }

    if (record == null)
    {
        throw new ArgumentNullException(nameof(record));
    }

    // File name encodes the table, the operation, and the record's primary key
    // so each pending operation maps to exactly one cache file.
    // NOTE(review): assumes record[table.PrimaryKey] never contains characters
    // that are invalid in a file name - TODO confirm against key generation.
    string operation = isInsert ? "INSERT" : "UPDATE";
    string fileName = $"{table.Name} {operation} {record[table.PrimaryKey]}.xml";
    string fullPath = Path.Combine(_cacheLocation.FullName, fileName);
    FileInfo file = new FileInfo(fullPath);

    LogTrace($"Adding cache file {file.Name}");
    WriteCacheData(file, new DataLogCacheData(table, record, isInsert));
}
/// <summary>
/// Retries every cached log operation that has not yet exceeded the maximum retry time.
/// Successful operations have their cache file deleted; failed ones are rewritten with
/// an incremented retry count so the next pass can try again.
/// </summary>
/// <returns>
/// One <see cref="DataLogDatabaseResult" /> per cache file processed, or an empty
/// sequence when the cache directory does not exist.
/// </returns>
public IEnumerable<DataLogDatabaseResult> RetryFromCache()
{
    if (!Directory.Exists(_cacheLocation.FullName))
    {
        return Enumerable.Empty<DataLogDatabaseResult>();
    }

    // Ignore files that have exceeded the maximum retry time.
    DateTime cutoff = DateTime.Now - _maximumRetryTime;

    bool loggedStart = false;
    int fileErrors = 0;
    List<DataLogDatabaseResult> results = new List<DataLogDatabaseResult>();

    // Oldest files first, so operations are replayed in roughly the order they were cached.
    foreach (FileInfo file in _cacheLocation.EnumerateFiles("*.xml").Where(n => n.CreationTime > cutoff).OrderBy(n => n.CreationTime))
    {
        if (!loggedStart)
        {
            LogDebug("Retrying from cache...");
            loggedStart = true;
        }

        try
        {
            LogTrace($"Reading cache file {file.Name}");
            DataLogCacheData data = ReadCacheData(file);
            data.Retries++;

            DataLogDatabaseResult result = data.IsInsert
                ? _databaseWriter.Insert(data.Table, data.Record)
                : _databaseWriter.Update(data.Table, data.Record);

            if (!result.Success && _alternateWriter != null)
            {
                DataLogDatabaseResult alternateResult = data.IsInsert
                    ? _alternateWriter.Insert(data.Table, data.Record)
                    : _alternateWriter.Update(data.Table, data.Record);

                // BUG FIX: previously the failed primary result was returned even when
                // the alternate writer succeeded (and the cache file was deleted), so
                // the summary counted a successful operation as a failure. Report the
                // result that reflects the actual outcome instead.
                if (alternateResult.Success)
                {
                    result = alternateResult;
                }
            }

            if (result.Success)
            {
                LogTrace($"Cache operation successful. Deleting file {file.Name}.");
                try
                {
                    // The file may be transiently locked (e.g. antivirus); retry a few times.
                    Retry.WhileThrowing<IOException>(() => file.Delete(), 5, TimeSpan.FromSeconds(1));
                }
                catch (IOException ex)
                {
                    LogDebug($"File {file.Name} could not be deleted: {ex.Message}");
                }
            }
            else
            {
                LogTrace($"Cache operation failed. {file.Name} has been retried {data.Retries} times.");
                // Persist the incremented retry count so progress survives across passes.
                WriteCacheData(file, data);
            }

            result.Retries = data.Retries;
            results.Add(result);
        }
        catch (Exception ex)
        {
            // Best-effort by design: a file could be locked, truncated, or otherwise
            // unreadable - count it and move on rather than abort the whole pass.
            LogTrace($"Unknown error: {ex.Message}");
            fileErrors++;
        }
    }

    if (results.Any())
    {
        LogDebug($"Cache retry results: {results.Count(n => n.Success)} succeeded, {results.Count(n => !n.Success)} failed, {fileErrors} cache errors.");
    }

    return results;
}