public void load_cache_when_incorrect_data_file()
{
    // Arrange: an isolated store directory with one valid frame file
    // and one file containing undecodable garbage.
    var cachePath = Path.Combine(_storePath, "EmptyCache");
    Directory.CreateDirectory(cachePath);
    var store = new FileAppendOnlyStore(new DirectoryInfo(cachePath));

    // Valid frame in 0.dat.
    using (var stream = new FileStream(Path.Combine(cachePath, "0.dat"),
        FileMode.OpenOrCreate, FileAccess.Write, FileShare.ReadWrite))
    {
        StorageFramesEvil.WriteFrame("test-key", 0, Encoding.UTF8.GetBytes("test message"), stream);
    }

    // Garbage in 1.dat that cannot be decoded as a frame.
    using (var sw = new StreamWriter(Path.Combine(cachePath, "1.dat")))
    {
        sw.Write("incorrect frame data");
    }

    // Act
    store.LoadCaches();
    var records = store.ReadRecords(0, Int32.MaxValue).ToArray();

    // Assert: only the valid frame is surfaced, and the corrupt
    // (non-empty) file is left on disk untouched.
    Assert.AreEqual(1, records.Length);
    Assert.AreEqual("test-key", records[0].Key);
    Assert.AreEqual(0, records[0].StreamVersion);
    Assert.AreEqual("test message", Encoding.UTF8.GetString(records[0].Data));
    Assert.IsTrue(File.Exists(Path.Combine(cachePath, "1.dat")));
}
IEnumerable<StorageFrameDecoded> EnumerateHistory()
{
    // Replays every frame persisted in the store's *.dat files,
    // visiting files in name order. Zero-length files are leftovers
    // from interrupted writes and are deleted on the fly.
    var dataFiles = _info.EnumerateFiles("*.dat").OrderBy(f => f.Name);
    foreach (var file in dataFiles)
    {
        if (file.Length == 0)
        {
            // quick cleanup of empty pending files
            file.Delete();
            continue;
        }

        using (var stream = file.OpenRead())
        {
            StorageFrameDecoded frame;
            // Stop at the first frame that fails to decode (end of
            // valid data for this file).
            while (StorageFramesEvil.TryReadFrame(stream, out frame))
                yield return frame;
        }
    }
}
IEnumerable <StorageFrameDecoded> EnumerateHistory()
{
    // cleanup old pending files
    // load indexes
    // build and save missing indexes

    // Enumerate every *.dat page blob in the container, in URI order,
    // and replay the frames stored inside each one.
    var datFiles = _container
        .ListBlobs(new BlobRequestOptions() { BlobListingDetails = BlobListingDetails.Metadata })
        .OrderBy(s => s.Uri.ToString())
        .OfType <CloudPageBlob>()
        .Where(s => s.Name.EndsWith(".dat"));

    foreach (var fileInfo in datFiles)
    {
        var bytes = fileInfo.DownloadByteArray();

        // A length that is an exact multiple of _pageSizeMultiplier
        // suggests the blob was never truncated down to its last valid
        // frame (presumably page-blob writes grow in fixed-size chunks
        // — confirm against the writer).
        bool potentiallyNonTruncatedChunk = bytes.Length % _pageSizeMultiplier == 0;
        long lastValidPosition = 0;
        using (var stream = new MemoryStream(bytes))
        {
            StorageFrameDecoded result;
            while (StorageFramesEvil.TryReadFrame(stream, out result))
            {
                // remember the end of the last frame that decoded cleanly
                lastValidPosition = stream.Position;
                yield return(result);
            }
        }

        // Only truncate when at least 512 bytes of undecodable tail
        // data remain past the last valid frame (512 matches the Azure
        // page-blob page size).
        var haveSomethingToTruncate = bytes.Length - lastValidPosition >= 512;
        if (potentiallyNonTruncatedChunk & haveSomethingToTruncate)
        {
            TruncateBlob(lastValidPosition, fileInfo);
        }
    }
}
void PersistInFile(string key, byte[] buffer, long commit)
{
    // Append the encoded frame to the current data file.
    StorageFramesEvil.WriteFrame(key, commit, buffer, _currentWriter);

    // Force the bytes through the OS buffers to physical storage.
    // NB: this is not guaranteed to work on Linux
    _currentWriter.Flush(true);
}
public void read_empty_frame()
{
    // Decoding from a stream with no bytes must report failure
    // rather than throw.
    var empty = new MemoryStream();

    StorageFrameDecoded decoded;
    var readOk = StorageFramesEvil.TryReadFrame(empty, out decoded);

    Assert.IsFalse(readOk);
}
void Persist(string key, byte[] buffer, long commit)
{
    // Encode first so the exact on-disk size of the record is known.
    var frame = StorageFramesEvil.EncodeFrame(key, buffer, commit);
    var bytesNeeded = frame.Data.Length + frame.Hash.Length;

    // Roll over to a fresh data file when the current one cannot
    // hold the whole frame.
    if (!_currentWriter.Fits(bytesNeeded))
    {
        CloseWriter();
        EnsureWriterExists(_all.Length);
    }

    _currentWriter.Write(frame.Data);
    _currentWriter.Write(frame.Hash);
    _currentWriter.Flush();
}
private void Persist(string key, byte[] buffer, long commit)
{
    // Encode up-front to learn how many bytes the frame occupies.
    var frame = StorageFramesEvil.EncodeFrame(key, buffer, commit);
    var size = frame.Data.Length + frame.Hash.Length;

    // Start a new writer at the current store version when the active
    // file has no room left for this frame.
    if (!this._currentWriter.Fits(size))
    {
        this.CloseWriter();
        this.EnsureWriterExists(this._cache.StoreVersion);
    }

    this._currentWriter.Write(frame.Data);
    this._currentWriter.Write(frame.Hash);
    this._currentWriter.Flush();
}
public void read_write_frame()
{
    // Round-trip a single frame through an in-memory stream and
    // verify every decoded field survives intact.
    const string msg = "test message";
    var stream = new MemoryStream();
    StorageFramesEvil.WriteFrame("test-key", 555, Encoding.UTF8.GetBytes(msg), stream);

    stream.Seek(0, SeekOrigin.Begin);
    var decoded = StorageFramesEvil.ReadFrame(stream);

    Assert.AreEqual("test-key", decoded.Name);
    Assert.AreEqual(555, decoded.Stamp);
    Assert.AreEqual(msg, Encoding.UTF8.GetString(decoded.Bytes));
}
void CreateCacheFiles()
{
    // Seed the store directory with DataFileCount data files, each
    // holding FileMessagesCount sequential frames keyed per file.
    const string msg = "test messages";
    Directory.CreateDirectory(_storePath);

    for (int file = 0; file < DataFileCount; file++)
    {
        var filePath = Path.Combine(_storePath, file + ".dat");
        using (var stream = new FileStream(filePath, FileMode.OpenOrCreate,
            FileAccess.Write, FileShare.ReadWrite))
        {
            for (int frame = 0; frame < FileMessagesCount; frame++)
            {
                StorageFramesEvil.WriteFrame("test-key" + file, frame,
                    Encoding.UTF8.GetBytes(msg + frame), stream);
            }
        }
    }
}
public void async_read_write_more_frame()
{
    //GIVEN
    // A writer task appends maxIndex frames to a shared temp file while
    // a reader task concurrently tails the same file and decodes them.
    string msg = "test message";
    var path = Path.Combine(Path.GetTempPath(), "lokad-cqrs", Guid.NewGuid() + ".pb");
    Directory.CreateDirectory(Path.GetDirectoryName(path));
    const int maxIndex = 100;

    var writeTask = Task.Factory.StartNew(() =>
    {
        using (var stream = new FileStream(path, FileMode.OpenOrCreate, FileAccess.Write, FileShare.ReadWrite))
        {
            for (int i = 0; i < maxIndex; i++)
            {
                StorageFramesEvil.WriteFrame("test-key" + i, i, Encoding.UTF8.GetBytes(msg + i), stream);
            }
        }
    });

    //WHEN
    // The reader busy-spins on TryReadFrame until every frame has been
    // seen; a failed read is retried (presumably TryReadFrame leaves the
    // stream positioned so a retry can succeed once more bytes are
    // flushed — verify against its implementation).
    // NOTE(review): there is no timeout — if the writer dies early the
    // reader loops forever and the test hangs; consider a deadline.
    int index = 0;
    var readTask = Task.Factory.StartNew(() =>
    {
        using (var stream = new FileStream(path, FileMode.OpenOrCreate, FileAccess.Read, FileShare.ReadWrite))
        {
            while (index < maxIndex)
            {
                StorageFrameDecoded decoded;
                if (StorageFramesEvil.TryReadFrame(stream, out decoded))
                {
                    Assert.AreEqual("test-key" + index, decoded.Name);
                    Assert.AreEqual(index, decoded.Stamp);
                    Assert.AreEqual(msg + index, Encoding.UTF8.GetString(decoded.Bytes));
                    index++;
                }
            }
        }
    });

    // Wait for both sides; Wait() also publishes the reader's writes to
    // `index` before the final assertion reads it.
    writeTask.Wait();
    readTask.Wait();
    Assert.AreEqual(maxIndex, index);
}
public byte[] SaveEnvelopeData(ImmutableEnvelope envelope)
{
    // Serialize attributes followed by the message into one payload block.
    byte[] payload;
    using (var block = new MemoryStream())
    {
        MessageSerializer.WriteAttributes(envelope.Attributes, block);
        MessageSerializer.WriteMessage(envelope.Message, envelope.Message.GetType(), block);
        payload = block.ToArray();
    }

    // Prefix with the format signature, then wrap the payload in a
    // storage frame stamped with the current UTC time.
    using (var output = new MemoryStream())
    {
        MessageSerializer.WriteCompactInt(Signature, output);
        StorageFramesEvil.WriteFrame(envelope.EnvelopeId, DateTime.UtcNow.Ticks, payload, output);
        return output.ToArray();
    }
}
IEnumerable<StorageFrameDecoded> EnumerateHistory()
{
    // cleanup old pending files
    // load indexes
    // build and save missing indexes
    //
    // Starts an asynchronous read of every *.dat file and returns a lazy
    // sequence that yields frames per file, in file-name order, as each
    // file finishes decoding.
    var datFiles = _info.EnumerateFiles("*.dat");
    var fileFrameCollections = new List<BlockingCollection<StorageFrameDecoded>>();

    foreach (var fileInfo in datFiles.OrderBy(fi => fi.Name))
    {
        // quick cleanup: zero-length files are leftovers from aborted writes
        if (fileInfo.Length == 0)
        {
            fileInfo.Delete();
            continue;
        }

        var fileFrames = new BlockingCollection<StorageFrameDecoded>();
        fileFrameCollections.Add(fileFrames);

        // BUGFIX: the stream must stay open until the async read completes.
        // The original wrapped it in `using`, disposing the FileStream
        // while BeginRead could still be in flight (use-after-dispose
        // race). It is now disposed inside the continuation instead.
        var reader = new FileStream(fileInfo.FullName, FileMode.Open, FileAccess.Read,
            FileShare.Read, 64 * 1024, FileOptions.Asynchronous);
        var buffer = new byte[fileInfo.Length];
        Task.Factory.FromAsync(reader.BeginRead(buffer, 0, buffer.Length, null, null), r =>
        {
            try
            {
                // NOTE(review): a single read may legally return fewer bytes
                // than requested; frames past that point would fail to decode.
                reader.EndRead(r);
                using (var ms = new MemoryStream(buffer))
                {
                    StorageFrameDecoded result;
                    while (StorageFramesEvil.TryReadFrame(ms, out result))
                    {
                        fileFrames.Add(result);
                    }
                }
            }
            finally
            {
                reader.Dispose();
                // BUGFIX: always complete the collection — even when decoding
                // throws — so consumers of GetConsumingEnumerable never block
                // forever.
                fileFrames.CompleteAdding();
            }
        });
    }

    return fileFrameCollections.SelectMany(c => c.GetConsumingEnumerable());
}
public void load_cache_when_exist_empty_file()
{
    // Verifies that LoadCaches removes a zero-length data file while
    // still surfacing records from the valid one.

    //write frame
    using (var stream = new FileStream(Path.Combine(_storePath, "0.dat"), FileMode.OpenOrCreate, FileAccess.Write, FileShare.ReadWrite))
        StorageFramesEvil.WriteFrame("test-key", 0, Encoding.UTF8.GetBytes("test message"), stream);

    //create empty file
    using (var sw = new StreamWriter(Path.Combine(_storePath, "1.dat")))
        sw.Write("");

    _store.LoadCaches();
    var data = _store.ReadRecords(0, Int32.MaxValue).ToArray();

    Assert.AreEqual(1, data.Length);
    Assert.AreEqual("test-key", data[0].Key);
    // NOTE(review): the frame was written with stamp 0 but StreamVersion is
    // expected to be 1 here, while the sibling "incorrect data file" test
    // expects 0 for the same write — presumably the two stores version
    // records differently; confirm which convention is intended.
    Assert.AreEqual(1, data[0].StreamVersion);
    Assert.AreEqual("test message", Encoding.UTF8.GetString(data[0].Data));
    // the empty file should have been deleted during cache loading
    Assert.IsFalse(File.Exists(Path.Combine(_storePath, "1.dat")));
}
public ImmutableEnvelope ReadAsEnvelopeData(byte[] buffer)
{
    using (var source = new MemoryStream(buffer))
    {
        // Reject payloads that do not start with our format signature.
        var signature = MessageSerializer.ReadCompactInt(source);
        if (Signature != signature)
        {
            throw new IOException("Signature bytes mismatch in envelope");
        }

        // The remainder is a single storage frame: envelope id,
        // UTC timestamp ticks, and the serialized body.
        var frame = StorageFramesEvil.ReadFrame(source);
        using (var body = new MemoryStream(frame.Bytes))
        {
            var attributes = MessageSerializer.ReadAttributes(body);
            var message = MessageSerializer.ReadMessage(body);
            var created = new DateTime(frame.Stamp, DateTimeKind.Utc);
            return new ImmutableEnvelope(frame.Name, created, message, attributes);
        }
    }
}
private async Task LoadTapeFile(CloudPageBlob fileBlob, BlockingCollection<StorageFrameDecoded> fileFrames)
{
    // Downloads one tape blob, decodes every complete frame into
    // fileFrames, and truncates any undecodable tail left behind by an
    // interrupted write. The collection is always completed so consumers
    // are never left blocking.
    var retryPolicy = new ExponentialRetry(TimeSpan.FromSeconds(0.5), 100);
    try
    {
        var tapeStream = await _policy.Get(() => fileBlob.OpenReadAsync(null, new BlobRequestOptions
        {
            RetryPolicy = retryPolicy,
            MaximumExecutionTime = TimeSpan.FromMinutes(30),
            ServerTimeout = TimeSpan.FromMinutes(30)
        }, null)).ConfigureAwait(false);

        using (var ms = new MemoryStream())
        {
            // BUGFIX: await the copy instead of blocking with .Wait() —
            // blocking inside an async method risks deadlock on a captured
            // context and wraps failures in AggregateException.
            await tapeStream.CopyToAsync(ms).ConfigureAwait(false);
            ms.Position = 0;

            // A length that is an exact multiple of the page size suggests
            // the blob was never truncated to its last valid frame.
            var potentiallyNonTruncatedChunk = ms.Length % this._pageSizeMultiplier == 0;
            long lastValidPosition = 0;
            StorageFrameDecoded result;
            while (StorageFramesEvil.TryReadFrame(ms, out result))
            {
                // remember the end of the last frame that decoded cleanly
                lastValidPosition = ms.Position;
                fileFrames.Add(result);
            }

            // Only truncate when at least one 512-byte page of tail data
            // remains past the last frame that decoded cleanly.
            var haveSomethingToTruncate = ms.Length - lastValidPosition >= 512;
            if (potentiallyNonTruncatedChunk && haveSomethingToTruncate)
            {
                this.TruncateBlob(lastValidPosition, fileBlob, retryPolicy);
            }
        }
    }
    catch (Exception x)
    {
        this.Log().Error(x, "Error loading tape {0}", fileBlob.Name);
        throw;
    }
    finally
    {
        fileFrames.CompleteAdding();
    }
}
IEnumerable<StorageFrameDecoded> EnumerateHistory()
{
    // cleanup old pending files
    // load indexes
    // build and save missing indexes
    //
    // Downloads every page blob in the container, in URI order, and
    // replays the frames each one contains.
    var blobs = _container
        .ListBlobs()
        .OrderBy(b => b.Uri.ToString())
        .OfType<CloudPageBlob>();

    foreach (var blob in blobs)
    {
        var bytes = blob.DownloadByteArray();
        using (var stream = new MemoryStream(bytes))
        {
            StorageFrameDecoded frame;
            while (StorageFramesEvil.TryReadFrame(stream, out frame))
            {
                yield return frame;
            }
        }
    }
}