/// <summary>
/// Reads the named value from the underlying registry key and deserializes it
/// into a <see cref="RegistryEntity{T}"/>.
/// </summary>
/// <typeparam name="T">Type to deserialize the stored byte array into.</typeparam>
/// <param name="name">Name of the registry value to read.</param>
/// <returns>
/// A populated <see cref="RegistryEntity{T}"/>; the default entity when the value
/// is missing. When a recoverable registry/IO error occurs, the entity is returned
/// with a default value and value kind.
/// </returns>
/// <exception cref="SerializationException">
/// Propagated unchanged when the stored bytes cannot be deserialized as <typeparamref name="T"/>.
/// </exception>
public RegistryEntity<T> Read<T>(string name)
{
    T result = default(T);
    byte[] byteArray = (byte[])_registryKey.GetValue(name);
    RegistryValueKind registryValueKind = default(RegistryValueKind);

    // Missing value: signal "not found" with the default entity rather than throwing.
    if (byteArray == null)
    {
        return default(RegistryEntity<T>);
    }

    try
    {
        registryValueKind = _registryKey.GetValueKind(name);
        result = _byteArraySerializer.Deserialize<T>(byteArray);
    }
    catch (Exception e) when (e is InvalidCastException
                           || e is SecurityException
                           || e is ObjectDisposedException
                           || e is IOException
                           || e is UnauthorizedAccessException)
    {
        // Recoverable registry access failures: log and fall through, returning
        // an entity with default value/kind. NOTE(review): consider routing this
        // through the store's logger instead of Console — TODO confirm available.
        Console.WriteLine(e.ToString());
    }
    catch (SerializationException)
    {
        // Deserialization failures are a caller concern; rethrow with `throw;`
        // so the original stack trace is preserved (previously `throw e;`,
        // which reset it — CA2200).
        throw;
    }

    return new RegistryEntity<T>(name, result, registryValueKind);
}
/// <summary>
/// Compacts all archive data files (every file except the current head) into a
/// fresh set of merged data + hint files, rebuilds the in-memory index from them,
/// swaps the new index/file set in, and then moves the merged files into place
/// on disk, discarding the old ones.
/// </summary>
/// <remarks>
/// Locking: <c>_mergeSyncRoot</c> serializes merges; <c>_indexSyncRoot</c> is
/// taken briefly twice — once to snapshot the file list/head, once to fold in
/// post-snapshot files and swap <c>_index</c>/<c>_files</c>. The index swap
/// happens BEFORE the file moves, so the statement order in the try block is
/// load-bearing. NOTE(review): the catch at the bottom only logs — recovery to
/// the pre-merge on-disk state is an open TODO, so a failure mid-move can leave
/// mixed old/new files on disk; verify against the store's startup recovery.
/// </remarks>
public void Merge()
{
    lock (_mergeSyncRoot)
    {
        IFirkinFile[] oldFiles;
        IFirkinFile head;
        int recordCount;

        // Snapshot the current head and the set of archive files under the index
        // lock; everything below works against this snapshot.
        lock (_indexSyncRoot)
        {
            head = _head;
            oldFiles = _files.Values.Where(x => x != head).OrderBy(x => x.FileId).ToArray();
            recordCount = Count;
        }
        _log.DebugFormat("starting merge of {0} files, {1} records (with head at id {2}) in '{3}' ",
            oldFiles.Length, recordCount, head.FileId, _storeDirectory);
        if (oldFiles.Length == 0)
        {
            // not merging if there is only one archive file
            return;
        }

        // merge current data into new data files and write out accompanying hint files
        ushort fileId = 0;
        var mergePairs = new List<MergePair>();
        MergePair current = null;
        uint serial = 0;
        foreach (var file in oldFiles)
        {
            var deleted = 0;
            var outofdate = 0;
            var active = 0;
            foreach (var record in file.GetRecords())
            {
                // Lazily open the next merge data/hint pair; `current` is reset to
                // null below once a file grows past _maxFileSize.
                if (current == null)
                {
                    fileId++;
                    serial = 0;
                    current = new MergePair()
                    {
                        Data = FirkinFile.CreateActive(GetMergeDataFilename(fileId), fileId),
                        Hint = new FirkinHintFile(GetMergeHintFilename(fileId))
                    };
                    mergePairs.Add(current);
                }
                if (record.ValueSize == 0)
                {
                    // not including deletes on merge
                    deleted++;
                    continue;
                }
                var key = _serializer.Deserialize(record.Key);
                // TODO: do i need a lock on _index here?
                KeyInfo info;
                if (!_index.TryGetValue(key, out info))
                {
                    // not including record that's no longer in index
                    outofdate++;
                    continue;
                }
                if (info.FileId != file.FileId || info.Serial != record.Serial)
                {
                    // not including out-of-date record (the index points at a newer copy)
                    outofdate++;
                    continue;
                }
                // Live record: renumber its serial for the merge file and write the
                // record plus a matching hint entry (used below to rebuild the index).
                var newRecord = record;
                newRecord.Serial = ++serial;
                var valuePosition = current.Data.Write(newRecord);
                current.Hint.WriteHint(newRecord, valuePosition);
                // if our current file is over the maxsize and not about to collide with
                // the head's id ... set it to null, so we can create the next file
                if (current.Data.Size > _maxFileSize && fileId < head.FileId)
                {
                    current = null;
                }
                active++;
            }
            _log.DebugFormat("read {0} records, skipped {1} deleted and {2} outofdate",
                active, deleted, outofdate);
        }
        _log.DebugFormat("merged {0} file(s) into {1} file(s)", oldFiles.Length, mergePairs.Count);

        // rebuild the index based on new files: reopen each merge file as an archive
        // and replay its hint file into a fresh index.
        var newIndex = new Dictionary<TKey, KeyInfo>();
        var newFiles = new Dictionary<ushort, IFirkinFile>();
        var mergeFiles = new List<IFirkinFile>();
        var mergedRecords = 0;
        foreach (var pair in mergePairs)
        {
            var file = FirkinFile.OpenArchiveFromActive(pair.Data);
            newFiles.Add(file.FileId, file);
            mergeFiles.Add(file);
            foreach (var hint in pair.Hint)
            {
                var keyInfo = new KeyInfo(pair.Data.FileId, hint);
                var key = _serializer.Deserialize(hint.Key);
                newIndex[key] = keyInfo;
                mergedRecords++;
            }
            pair.Hint.Dispose();
        }
        _log.DebugFormat("read {0} records from hint files", mergedRecords);

        // add records && files not part of merge: anything at or past the snapshot
        // head (including writes that arrived during the merge) wins over merged data.
        lock (_indexSyncRoot)
        {
            foreach (var file in _files.Values.Where(x => x.FileId >= head.FileId).OrderBy(x => x.FileId))
            {
                newFiles[file.FileId] = file;
                foreach (var pair in file)
                {
                    var key = _serializer.Deserialize(pair.Key);
                    if (pair.Value.ValueSize == 0)
                    {
                        // Zero-size value marks a delete: drop the key.
                        newIndex.Remove(key);
                    }
                    else
                    {
                        newIndex[key] = pair.Value;
                    }
                }
                _log.DebugFormat("added entries from file {0}: {1}", file.FileId, newIndex.Count);
            }
            _log.DebugFormat("total records in merged index: {0}", newIndex.Count);

            // swap out index and file list
            _index = newIndex;
            _files = newFiles;
        }

        try
        {
            // move old files out of the way (rename to "old" names first so the merged
            // files can take their place before anything is deleted)
            foreach (var file in oldFiles)
            {
                file.Dispose();
                var oldFile = GetOldDataFilename(file.FileId);
#if DEBUG
                _log.DebugFormat("moving old from {0} to {1}",
                    Path.GetFileName(file.Filename), Path.GetFileName(oldFile));
#endif
                File.Move(file.Filename, oldFile);
                var hintfile = GetHintFilename(file.FileId);
                if (File.Exists(hintfile))
                {
                    var oldHintFile = GetOldHintFilename(file.FileId);
#if DEBUG
                    _log.DebugFormat("moving old hint from {0} to {1}",
                        Path.GetFileName(hintfile), Path.GetFileName(oldHintFile));
#endif
                    File.Move(hintfile, oldHintFile);
                }
            }

            // move new files into place
            foreach (var file in mergeFiles)
            {
#if DEBUG
                _log.DebugFormat("creating file and hint for id {0}", file.FileId);
#endif
                file.Rename(GetDataFilename(file.FileId));
                File.Move(GetMergeHintFilename(file.FileId), GetHintFilename(file.FileId));
            }

            // delete old files
            foreach (var file in oldFiles)
            {
                var oldFile = GetOldDataFilename(file.FileId);
#if DEBUG
                _log.DebugFormat("deleting old file {0}", Path.GetFileName(oldFile));
#endif
                File.Delete(oldFile);
                var hintfile = GetOldHintFilename(file.FileId);
                if (File.Exists(hintfile))
                {
#if DEBUG
                    _log.DebugFormat("deleting old hint file {0}", Path.GetFileName(hintfile));
#endif
                    File.Delete(hintfile);
                }
            }
        }
        catch (Exception e)
        {
            // something went wrong, try to recover to pre-merge state
            // TODO: go back to pre-merge state
            _log.Warn("Unable to complete merge", e);
        }
    }
    _log.DebugFormat("completed merge in '{0}'", _storeDirectory);
}