public async Task Preload(string FilePath)
{
    await Task.Run(() =>
    {
        try
        {
            // Index every in-memory NodeBlock by its file offset so preloaded
            // disk nodes can be matched back to their cache slots.
            Dictionary<long, NodeBlock> nodeblocks = new Dictionary<long, NodeBlock>();

            for (int header = 0; header < next.Length; header++)
            {
                for (int value = 0; value < next[header].Length; value++)
                {
                    foreach (INodeBlock inb in next[header][value])
                    {
                        if (inb is NodeBlock nb)
                        {
                            nodeblocks.Add(nb.Offset, nb);
                        }
                    }
                }
            }

            using (LockedNodeFileStream ns = new LockedNodeFileStream(new FileStream(FilePath, FileMode.Open, FileAccess.Read, FileShare.Read, 4096, FileOptions.RandomAccess)))
            using (FileStream fs = new FileStream(FilePath, FileMode.Open, FileAccess.Read, FileShare.Read, 4096, FileOptions.RandomAccess))
            using (FileStream fsn = new FileStream(FilePath, FileMode.Open, FileAccess.Read, FileShare.Read, 4096, FileOptions.RandomAccess))
            {
                // The file header stores the offsets of the JSON section and the sorted node index.
                byte[] offsetBytes = new byte[DiskNode.HEADER_BYTES];
                fs.Read(offsetBytes, 0, offsetBytes.Length);

                long JsonOffset = offsetBytes.GetLong(0);
                long SortOffset = offsetBytes.GetLong(8);

                fs.Seek(SortOffset, SeekOrigin.Begin);

                ulong freeMem = SystemInterop.Memory.Status.ullAvailPhys;

                // Keep caching node data until available physical memory drops to the configured floor.
                while (freeMem > this.Settings.MinFreeMemory + this.Settings.RangeFreeMemory)
                {
                    for (int i = 0; i < this.Settings.PreloadChunkSize / 2; i++)
                    {
                        // Stop once the sorted index has been exhausted.
                        if (fs.Position >= JsonOffset)
                        {
                            return;
                        }

                        // Each index entry is a 5-byte (40-bit) node offset.
                        byte[] thisNodeBytes = new byte[5];
                        fs.Read(thisNodeBytes, 0, thisNodeBytes.Length);

                        long offset = thisNodeBytes.GetInt40();

                        if (!NoCache.Contains(offset))
                        {
                            if (!nodeblocks.TryGetValue(offset, out NodeBlock nb))
                            {
                                continue;
                            }

                            DiskNode dn = new DiskNode(ns, offset);

                            if (dn.NextOffset != 0)
                            {
                                // Copy the node's backing bytes into the cache and charge
                                // the copy against the free-memory estimate.
                                long bLength = dn.NextOffset - dn.Offset;

                                ByteCache cachedBytes = new ByteCache()
                                {
                                    Data = new byte[bLength]
                                };

                                fsn.Seek(dn.Offset, SeekOrigin.Begin);
                                fsn.Read(cachedBytes.Data, 0, (int)bLength);

                                CachedBytes[nb.Index] = cachedBytes;

                                freeMem -= (ulong)cachedBytes.Data.Length;
                            }
                        }

                        if (freeMem < this.Settings.MinFreeMemory + this.Settings.RangeFreeMemory)
                        {
                            return;
                        }
                    }

                    // Refresh the measurement once per chunk instead of once per node.
                    freeMem = SystemInterop.Memory.Status.ullAvailPhys;
                }
            }
        }
        catch (Exception)
        {
            // Preloading is a best-effort optimization; swallow failures unless a debugger is attached.
            if (Debugger.IsAttached)
            {
                Debugger.Break();
            }
        }
    });
}
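// Preload relies on byte-helper extensions (GetLong, GetInt40) that are not shown in this
// section. The sketch below is a hypothetical illustration of what little-endian versions
// could look like; the project's actual helpers may differ in name, location, or endianness.
internal static class ByteArrayExtensionsSketch
{
    // Reads a little-endian 64-bit value starting at the given index.
    public static long GetLong(this byte[] bytes, int index)
    {
        long value = 0;

        for (int i = 7; i >= 0; i--)
        {
            value = (value << 8) | bytes[index + i];
        }

        return value;
    }

    // Reads a 5-byte (40-bit) little-endian node offset, matching the 5-byte
    // index entries consumed in Preload.
    public static long GetInt40(this byte[] bytes)
    {
        long value = 0;

        for (int i = 4; i >= 0; i--)
        {
            value = (value << 8) | bytes[i];
        }

        return value;
    }
}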
public override void Evaluate(Evaluation e, long routeKey, bool MultiThread = true)
{
    ExecutingEvaluations++;

    try
    {
        if (e is null)
        {
            throw new ArgumentNullException(nameof(e));
        }

        string FilePath = DiskNode._backingStream.FilePath;

        object streamLock = new object();

        using (FileStream fs = new FileStream(FilePath, FileMode.Open, FileAccess.Read, FileShare.Read, 4096, FileOptions.RandomAccess))
        {
            // Collect every node block whose header/value pair matches the row being evaluated.
            List<INodeBlock> matchingNodes = new List<INodeBlock>(LastMatchAmount);

            for (int header = 0; header < next.Length; header++)
            {
                int value = e.DataRow[header];

                if (next[header].Length > value)
                {
                    matchingNodes.AddRange(next[header][value]);
                }
            }

            // Track the largest match count seen so far so the next list can be presized.
            LastMatchAmount = Math.Max(LastMatchAmount, matchingNodes.Count);

            void Evaluate(INodeBlock n)
            {
                if (n is DiskNode dn)
                {
                    dn.Evaluate(e, 0, MultiThread);
                }
                else if (n is NodeBlock nb)
                {
                    long bLength = n.NextOffset - n.Offset;

                    // Reuse a pooled buffer when one is available and large enough; otherwise allocate.
                    if (!ArrayPool.TryDequeue(out byte[] backingData) || backingData.Length < bLength)
                    {
                        backingData = new byte[bLength];
                    }

                    ByteCache cachedBytes = CachedBytes[nb.Index];

                    if (cachedBytes.Data is null)
                    {
                        // Cache miss: read the node's bytes from disk under the stream lock,
                        // since the FileStream position is shared across evaluation threads.
                        cachedBytes = new ByteCache()
                        {
                            Data = new byte[bLength]
                        };

                        lock (streamLock)
                        {
                            fs.Seek(n.Offset, SeekOrigin.Begin);
                            fs.Read(cachedBytes.Data, 0, (int)bLength);
                        }

                        CachedBytes[nb.Index] = cachedBytes;
                    }

                    cachedBytes.SetLast();

                    cachedBytes.Data.CopyTo(backingData, 0);

                    DiskNode nn = new DiskNode(backingData, n.Offset, n.Offset);

                    nn.Evaluate(e, 0, MultiThread);
                }
            }

            if (MultiThread)
            {
                Parallel.ForEach(matchingNodes, Evaluate);
            }
            else
            {
                foreach (INodeBlock nb in matchingNodes)
                {
                    Evaluate(nb);
                }
            }
        }
    }
    finally
    {
        ExecutingEvaluations--;
        FlushMemory();
    }
}
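// Both Preload and Evaluate store node bytes in ByteCache entries, whose definition is not
// part of this section. The sketch below is a hypothetical minimal shape consistent with how
// the type is used above (a settable Data buffer plus SetLast for last-access tracking that
// FlushMemory could use when evicting); the real implementation may differ.
public class ByteCacheSketch
{
    // The raw node bytes copied out of the backing file.
    public byte[] Data { get; set; }

    // Tick of the most recent access, usable for least-recently-used eviction.
    public long LastAccess { get; private set; }

    // Called on every cache hit to mark the entry as recently used.
    public void SetLast() => LastAccess = System.DateTime.UtcNow.Ticks;
}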