public void Can_rename_file() {
    // Verify that renaming an active file moves it on disk and that the file
    // remains readable through the same instance afterwards.
    var f1 = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString());
    var f2 = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString());
    var file = FirkinFile.CreateActive(f1, 1);
    try {

        // write a single one-byte record so there is something to read back
        using(var data = new MemoryStream()) {
            data.WriteByte(2);
            data.Position = 0;
            var keyInfo = file.Write(new KeyValuePair() { Key = new byte[] { 1 }, Value = data, ValueSize = (uint)data.Length });
            file.Rename(f2);
            Assert.IsFalse(File.Exists(f1));
            Assert.IsTrue(File.Exists(f2));

            // the record written before the rename must still be retrievable
            using(var stream = file.ReadValue(keyInfo)) {
                Assert.AreEqual(1, stream.Length);
                Assert.AreEqual(2, stream.ReadByte());
            }
        }
    } finally {

        // dispose before deleting: on Windows the delete fails while the
        // file handle is still open, which previously leaked temp files
        // whenever an assertion (or the write itself) failed
        file.Dispose();
        File.Delete(f1);
        File.Delete(f2);
    }
}
// Retires the current active (head) file to archive status and starts a
// fresh active file with the next sequential file id. Callers are expected
// to hold whatever synchronization the store requires.
private void NewHead() {
    _log.DebugFormat("switching to new active file at size {0}", _head.Size);

    // re-register the retiring head as a read-only archive under its own id
    var retiring = _head;
    _files[retiring.FileId] = FirkinFile.OpenArchiveFromActive(retiring);

    // the new head takes the next id in sequence
    var nextId = (ushort)(retiring.FileId + 1);
    _head = FirkinFile.CreateActive(GetDataFilename(nextId), nextId);
    _files[nextId] = _head;
}
// Deletes every data file and all index entries, then re-creates an empty
// store with a single fresh active file at id 1.
public void Truncate() {
    // Lock order is merge root first, then index root — the same order the
    // rest of the store uses, so the two paths cannot deadlock each other.
    lock(_mergeSyncRoot)
    lock(_indexSyncRoot) {
        foreach(var staleFile in _files.Values) {
            // capture the path before Dispose in case Filename becomes
            // unavailable on a disposed instance
            var path = staleFile.Filename;
            staleFile.Dispose();
            File.Delete(path);
        }
        _files.Clear();
        _index.Clear();

        // start over with an empty active file at id 1
        _head = FirkinFile.CreateActive(GetDataFilename(1), 1);
        _files[_head.FileId] = _head;
    }
}
// Rebuilds the in-memory key index from the data files on disk and selects
// (or creates) the active head file. For each data file, entries are loaded
// from its hint file when one exists (fast path), otherwise by scanning the
// data file itself, honoring delete markers (ValueSize == 0) along the way.
private void Initialize() {
    if(!Directory.Exists(_storeDirectory)) {
        Directory.CreateDirectory(_storeDirectory);
    }

    // get all data files, ordered by their numeric file id so later files
    // overwrite earlier index entries for the same key
    var files = from filename in Directory.GetFiles(_storeDirectory, STORAGE_FILE_PREFIX + "*" + DATA_FILE_EXTENSION)
                let fileId = ParseFileId(filename)
                orderby fileId
                select new { FileId = fileId, Filename = filename };
    uint maxSerial = 0;
    IFirkinArchiveFile last = null;
    foreach(var fileInfo in files) {

        // reset per file: only the last file's highest serial matters, since
        // it may be re-opened as the active head below
        maxSerial = 0;
        var file = FirkinFile.OpenArchive(fileInfo.Filename, fileInfo.FileId);
        _files.Add(file.FileId, file);

        // iterate over key info
        var hintFilename = GetHintFilename(fileInfo.FileId);
        var count = 0;
        var delete = 0;
        if(File.Exists(hintFilename)) {

            // fast path: the hint file carries key/position info without the values
            // NOTE(review): maxSerial is not updated on this path — if the last
            // data file has a hint file, the head is re-opened with serial 0
            // below; confirm whether that is intended.
            var hintFile = new FirkinHintFile(hintFilename);
            foreach(var hint in hintFile) {
                var keyInfo = new KeyInfo(fileInfo.FileId, hint);
                var key = _serializer.Deserialize(hint.Key);
                _index[key] = keyInfo;
                count++;
            }
            hintFile.Dispose();
            _log.DebugFormat("read {0} record markers from hint file {1}", count, fileInfo.FileId);
        } else {

            // slow path: scan the data file record by record
            foreach(var pair in file) {
                var key = _serializer.Deserialize(pair.Key);
                maxSerial = pair.Value.Serial;
                if(pair.Value.ValueSize == 0) {

                    // zero-length value is a delete marker (tombstone)
                    _index.Remove(key);
                    delete++;
                } else {
                    _index[key] = pair.Value;
                    count++;
                }
            }
            _log.DebugFormat("read {0} record and {1} delete markers from data file {2}", count, delete, fileInfo.FileId);
        }
        last = file;
    }

    // pick the head: reuse the last file if it still has room, otherwise
    // start a fresh file one id past the last (or id 1 for an empty store)
    if(last != null && last.Size < _maxFileSize) {
        _head = FirkinFile.OpenActiveFromArchive(last, maxSerial);
    } else {
        ushort fileId = 1;
        if(last != null) {
            fileId += last.FileId;
        }
        _head = FirkinFile.CreateActive(GetDataFilename(fileId), fileId);
    }
    _files[_head.FileId] = _head;
}
// Compacts all archive files (everything except the head) into a minimal set
// of new data files, dropping delete markers and records that have been
// superseded, writes matching hint files, rebuilds the index, and swaps the
// new files into place. Runs under the merge lock for its whole duration;
// the index lock is taken only for the short snapshot and swap phases.
public void Merge() {
    lock(_mergeSyncRoot) {
        IFirkinFile[] oldFiles;
        IFirkinFile head;
        int recordCount;

        // snapshot the current file set and head under the index lock so the
        // merge works against a consistent view
        lock(_indexSyncRoot) {
            head = _head;
            oldFiles = _files.Values.Where(x => x != head).OrderBy(x => x.FileId).ToArray();
            recordCount = Count;
        }
        _log.DebugFormat("starting merge of {0} files, {1} records (with head at id {2}) in '{3}' ", oldFiles.Length, recordCount, head.FileId, _storeDirectory);
        if(oldFiles.Length == 0) {

            // not merging if there is only one archive file
            return;
        }

        // merge current data into new data files and write out accompanying hint files
        ushort fileId = 0;
        var mergePairs = new List<MergePair>();
        MergePair current = null;
        uint serial = 0;
        foreach(var file in oldFiles) {
            var deleted = 0;
            var outofdate = 0;
            var active = 0;
            foreach(var record in file.GetRecords()) {
                if(current == null) {

                    // start the next merge output file; serials restart per file
                    fileId++;
                    serial = 0;
                    current = new MergePair() { Data = FirkinFile.CreateActive(GetMergeDataFilename(fileId), fileId), Hint = new FirkinHintFile(GetMergeHintFilename(fileId)) };
                    mergePairs.Add(current);
                }
                if(record.ValueSize == 0) {

                    // not including deletes on merge
                    deleted++;
                    continue;
                }
                var key = _serializer.Deserialize(record.Key);

                // TODO: do i need a lock on _index here?
                KeyInfo info;
                if(!_index.TryGetValue(key, out info)) {

                    // not including record that's no longer in index
                    outofdate++;
                    continue;
                }
                if(info.FileId != file.FileId || info.Serial != record.Serial) {

                    // not including out-of-date record (a newer version lives elsewhere)
                    outofdate++;
                    continue;
                }

                // record survives the merge: re-serial it and write data + hint
                var newRecord = record;
                newRecord.Serial = ++serial;
                var valuePosition = current.Data.Write(newRecord);
                current.Hint.WriteHint(newRecord, valuePosition);

                // if our current file is over the maxsize and not about to collide
                // with the head's id, set it to null so we can create the next file
                if(current.Data.Size > _maxFileSize && fileId < head.FileId) {
                    current = null;
                }
                active++;
            }
            _log.DebugFormat("read {0} records, skipped {1} deleted and {2} outofdate", active, deleted, outofdate);
        }
        _log.DebugFormat("merged {0} file(s) into {1} file(s)", oldFiles.Length, mergePairs.Count);

        // rebuild the index based on new files, reading positions back from
        // the just-written hint files
        var newIndex = new Dictionary<TKey, KeyInfo>();
        var newFiles = new Dictionary<ushort, IFirkinFile>();
        var mergeFiles = new List<IFirkinFile>();
        var mergedRecords = 0;
        foreach(var pair in mergePairs) {
            var file = FirkinFile.OpenArchiveFromActive(pair.Data);
            newFiles.Add(file.FileId, file);
            mergeFiles.Add(file);
            foreach(var hint in pair.Hint) {
                var keyInfo = new KeyInfo(pair.Data.FileId, hint);
                var key = _serializer.Deserialize(hint.Key);
                newIndex[key] = keyInfo;
                mergedRecords++;
            }
            pair.Hint.Dispose();
        }
        _log.DebugFormat("read {0} records from hint files", mergedRecords);

        // add records and files not part of merge (the head, plus anything
        // written since the snapshot), then atomically swap index and file list
        lock(_indexSyncRoot) {
            foreach(var file in _files.Values.Where(x => x.FileId >= head.FileId).OrderBy(x => x.FileId)) {
                newFiles[file.FileId] = file;
                foreach(var pair in file) {
                    var key = _serializer.Deserialize(pair.Key);
                    if(pair.Value.ValueSize == 0) {
                        newIndex.Remove(key);
                    } else {
                        newIndex[key] = pair.Value;
                    }
                }
                _log.DebugFormat("added entries from file {0}: {1}", file.FileId, newIndex.Count);
            }
            _log.DebugFormat("total records in merged index: {0}", newIndex.Count);

            // swap out index and file list
            _index = newIndex;
            _files = newFiles;
        }
        try {

            // move old files out of the way (renamed rather than deleted so a
            // failure mid-shuffle leaves the data recoverable on disk)
            foreach(var file in oldFiles) {
                file.Dispose();
                var oldFile = GetOldDataFilename(file.FileId);
#if DEBUG
                _log.DebugFormat("moving old from {0} to {1}", Path.GetFileName(file.Filename), Path.GetFileName(oldFile));
#endif
                File.Move(file.Filename, oldFile);
                var hintfile = GetHintFilename(file.FileId);
                if(File.Exists(hintfile)) {
                    var oldHintFile = GetOldHintFilename(file.FileId);
#if DEBUG
                    _log.DebugFormat("moving old hint from {0} to {1}", Path.GetFileName(hintfile), Path.GetFileName(oldHintFile));
#endif
                    File.Move(hintfile, oldHintFile);
                }
            }

            // move new files into place
            foreach(var file in mergeFiles) {
#if DEBUG
                _log.DebugFormat("creating file and hint for id {0}", file.FileId);
#endif
                file.Rename(GetDataFilename(file.FileId));
                File.Move(GetMergeHintFilename(file.FileId), GetHintFilename(file.FileId));
            }

            // delete old files now that the new set is fully in place
            foreach(var file in oldFiles) {
                var oldFile = GetOldDataFilename(file.FileId);
#if DEBUG
                _log.DebugFormat("deleting old file {0}", Path.GetFileName(oldFile));
#endif
                File.Delete(oldFile);
                var hintfile = GetOldHintFilename(file.FileId);
                if(File.Exists(hintfile)) {
#if DEBUG
                    _log.DebugFormat("deleting old hint file {0}", Path.GetFileName(hintfile));
#endif
                    File.Delete(hintfile);
                }
            }
        } catch(Exception e) {

            // something went wrong, try to recover to pre-merge state
            // TODO: go back to pre-merge state — as written this only logs,
            // leaving the on-disk layout in whatever intermediate state the
            // file shuffle reached
            _log.Warn("Unable to complete merge", e);
        }
    }
    _log.DebugFormat("completed merge in '{0}'", _storeDirectory);
}