/// <summary>
/// Streams readers covering [start, end] for the given symbol/column. Ranges not
/// covered by the on-disk index (leading gaps, missing backing files, and any
/// trailing gap) are collected fresh, yielded inline, and the updated index
/// metadata is persisted.
/// </summary>
/// <param name="symbol">Instrument symbol the data belongs to.</param>
/// <param name="column">Column/field within the symbol's data set.</param>
/// <param name="start">Inclusive start of the requested range.</param>
/// <param name="end">Exclusive-ish end of the requested range (gap-fill runs while start &lt; end).</param>
public IEnumerable<TStream> Fetch(string symbol, string column, DateTime start, DateTime end)
{
    var index = GetBTree(symbol, column);

    // Snapshot the search results up front so that collecting/persisting below
    // cannot invalidate the enumeration.
    foreach (var node in index.Search(start, end).ToList())
    {
        // Gap before this node, or its backing file has vanished: collect the
        // range up to (but not including) this node's start, then persist the
        // refreshed index.
        // NOTE(review): when only the file is missing (start >= node.Key) this
        // collects an empty/backwards range and the node's own span is never
        // re-fetched — confirm CollectAndFetch handles that case.
        if (start < node.Key || !node.Value.FilePath.FileExists())
        {
            foreach (var collected in CollectAndFetch(symbol, column, start, node.Key.AddTicks(-1)))
            {
                yield return collected;
            }

            Persist(_filePathProvider.GetMetaFilePath(symbol /*, column*/), index);
        }

        var nodeReader = _streamFactory.CreateReader(node.Value.FilePath);
        if (nodeReader != null)
        {
            yield return nodeReader;
        }

        // Advance past the span this node covers.
        start = node.Value.End;
    }

    // Trailing gap after the last indexed entry.
    if (start < end)
    {
        foreach (var collected in CollectAndFetch(symbol, column, start, end))
        {
            yield return collected;
        }

        Persist(_filePathProvider.GetMetaFilePath(symbol /*, column*/), index);
    }
}
/// <summary>
/// Reads <paramref name="fileName"/> page by page, splits it into fixed-size
/// packets (caching page-boundary partial packets so adjacent pages can be
/// stitched together), serializes each packet as a hashed ProtoMessage, and
/// broadcasts them via <paramref name="byteSender"/> alongside a periodically
/// rebroadcast header describing the transfer.
/// </summary>
/// <param name="fileName">Path of the file to broadcast; also embedded in the header message.</param>
/// <param name="byteSender">Transport used to send serialized packets; its MaximumTransmittableBytes bounds the packet size.</param>
/// <param name="streamFactory">Factory producing the file reader.</param>
/// <param name="scheduler">Rx scheduler the pipeline observes/sends on.</param>
/// <param name="headerRebroadcastSeconds">Interval between header rebroadcasts; must be non-negative.</param>
/// <param name="cancellationToken">Cancels the broadcast.</param>
/// <exception cref="ArgumentOutOfRangeException">Thrown when <paramref name="headerRebroadcastSeconds"/> is negative.</exception>
public async Task BroadcastFile(string fileName, IByteSender byteSender, IStreamFactory streamFactory, IScheduler scheduler, float headerRebroadcastSeconds = 1, CancellationToken cancellationToken = default)
{
    if (headerRebroadcastSeconds < 0)
    {
        // FIX: the original threw ArgumentException(nameof(...)), which puts the
        // parameter name into the *message* slot. ArgumentOutOfRangeException takes
        // the parameter name and remains catchable as ArgumentException.
        throw new ArgumentOutOfRangeException(nameof(headerRebroadcastSeconds));
    }

    // FIX: linked token sources hold a registration on the outer token; dispose
    // it when the broadcast completes (the original leaked it).
    using (var cancellationTokenSource = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken))
    {
        var token = cancellationTokenSource.Token;

        // NOTE(review): reader's type/ownership is not visible here — if it is
        // IDisposable and PageObservable does not take ownership, it leaks; confirm.
        var reader = streamFactory.CreateReader(fileName);

        const int byteToMegabyteFactor = 1000000; // decimal MB, not MiB
        var pageSize = 8 * byteToMegabyteFactor;  // 8 MB pages read from the file
        var pageObservable = reader.PageObservable(pageSize, token);

        const long broadcastIdByteCount = 16; // guid bytes
        const long fudgeAmount = 100;         // the hash alg chosen has to be equal to or smaller than this
        var packetSize = byteSender.MaximumTransmittableBytes - broadcastIdByteCount - fudgeAmount;

        // Partial packets straddling a page boundary are parked here until the
        // neighbouring page supplies the other half (see CachePartialPacket).
        var partialPacketCache = new ConcurrentDictionary<long, byte[]>();

        var payloadObservable = pageObservable
            .ObserveOn(scheduler)
            .SelectMany(page =>
            {
                // Page offsets are byte offsets into the whole file; packet
                // indices are file offsets divided by packetSize.
                var firstPacketIndex = page.PageIndex / packetSize;
                var hasFirstFragmented = (page.PageIndex % packetSize) != 0;
                var packetList = new List<PayloadWrapper>();

                // Leading fragment: the page starts mid-packet.
                var firstPartialPacketIndex = hasFirstFragmented ? firstPacketIndex : (long?)null;
                var secondPacketPayloadIndex = (firstPartialPacketIndex + 1) * packetSize;
                var firstPartialLength = (secondPacketPayloadIndex - page.PageIndex) ?? 0;
                if (hasFirstFragmented)
                {
                    var partialBuffer = new byte[firstPartialLength];
                    Array.Copy(page.Bytes, partialBuffer, firstPartialLength);
                    var firstPayload = new PayloadWrapper() { PayloadIndex = firstPartialPacketIndex.Value, bytes = partialBuffer };
                    CachePartialPacket(partialPacketCache, firstPayload, packetList);
                }

                // Full packets wholly contained in this page.
                var firstFullPacketIndex = hasFirstFragmented ? firstPacketIndex + 1 : firstPacketIndex;
                var lastPageByteIndex = page.Bytes.Length - 1;
                var lastBytePayloadIndex = page.PageIndex + lastPageByteIndex;
                var lastPacketIndex = lastBytePayloadIndex / packetSize;
                var lastPartialLength = (page.Bytes.Length - firstPartialLength) % packetSize;
                var hasLastPartialPacket = lastPacketIndex > firstPacketIndex && (lastPartialLength > 0);
                var lastFullPacketIndex = hasLastPartialPacket ? lastPacketIndex - 1 : lastPacketIndex;

                //todo: consider parallel foreach
                for (long packetIndex = firstFullPacketIndex; packetIndex <= lastFullPacketIndex; packetIndex++)
                {
                    var packetBuffer = new byte[packetSize];
                    var startPageIndex = ((packetIndex - firstFullPacketIndex) * packetSize) + firstPartialLength;
                    Array.Copy(page.Bytes, startPageIndex, packetBuffer, 0, packetSize);
                    packetList.Add(new PayloadWrapper() { PayloadIndex = packetIndex, bytes = packetBuffer });
                }

                // Trailing fragment: the page ends mid-packet.
                if (hasLastPartialPacket)
                {
                    var partialBuffer = new byte[lastPartialLength];
                    var lastPartialPageIndex = page.Bytes.Length - lastPartialLength;
                    Array.Copy(page.Bytes, lastPartialPageIndex, partialBuffer, 0, lastPartialLength);
                    var lastPayload = new PayloadWrapper() { PayloadIndex = lastPacketIndex, bytes = partialBuffer };
                    CachePartialPacket(partialPacketCache, lastPayload, packetList);
                }

                return packetList.AsEnumerable();
            })
            // After all pages complete, flush whatever partials never found a
            // matching half (e.g. the file's final, short packet).
            .Concat(partialPacketCache
                .Select(kvp => new PayloadWrapper { PayloadIndex = kvp.Key, bytes = kvp.Value })
                .ToObservable());

        var broadcastId = Guid.NewGuid();

        // NOTE(review): MD5 here appears to be a per-packet integrity checksum,
        // not a security boundary — do not rely on it against tampering.
        var md5 = MD5.Create();
        var serializedPayloadObservable = payloadObservable
            .Select(payloadWrapper =>
            {
                byte[] serializedPayload;
                var hash = md5.ComputeHash(payloadWrapper.bytes);
                var protoMessage = new ProtoMessage(broadcastId, payloadWrapper.PayloadIndex, payloadWrapper.bytes, null, null, null, hash);
                using (var memoryStream = new MemoryStream())
                {
                    Serializer.Serialize(memoryStream, protoMessage);
                    serializedPayload = memoryStream.ToArray();
                }
                return serializedPayload;
            })
            .Finally(() => { md5.Dispose(); })
            .Publish(); // hot: header and sender share one pass over the payloads

        // Header message: carries packet geometry instead of payload bytes.
        byte[] serializedHeader;
        var packetCount = (long)Math.Ceiling((double)reader.Length / packetSize);
        var rebroadcastTime = TimeSpan.FromSeconds(headerRebroadcastSeconds);
        using (var memoryStream = new MemoryStream())
        {
            Serializer.Serialize(memoryStream, new ProtoMessage(broadcastId, null, null, packetSize, fileName, packetCount, null));
            serializedHeader = memoryStream.ToArray();
        }

        var headerObservable = GetHeaderObservable(serializedHeader, rebroadcastTime, serializedPayloadObservable, scheduler);
        var packetObservable = headerObservable.Merge(serializedPayloadObservable);

        // Both subscribers (header generator and merged sender) are attached;
        // start the shared payload stream.
        serializedPayloadObservable.Connect();

        var sendObservable = packetObservable
            .ObserveOn(scheduler)
            .SelectMany((array, index) =>
            {
                return Observable.FromAsync(async c =>
                {
                    await byteSender.Send(array).ConfigureAwait(false);
                    return Unit.Default;
                });
            });

        // Completes when every packet has been sent, or throws on cancellation.
        await sendObservable.ToTask(token).ConfigureAwait(false);
    }
}