public void Query_CreateTreeAndExecuteQuery_ExpectCorrectElementsToBeReturned()
{
    // Build a tree holding four intervals; [20,30] and [25,35] overlap.
    var tree = new RangeTree<int, RangeItem>(new RangeItemComparer());
    tree.Add(new RangeItem(0, 10, "1"));
    tree.Add(new RangeItem(20, 30, "2"));
    tree.Add(new RangeItem(15, 17, "3"));
    tree.Add(new RangeItem(25, 35, "4"));

    // Point strictly inside the first interval.
    var hitsInside = tree.Query(5);
    Assert.That(hitsInside.Count, Is.EqualTo(1));
    Assert.That(hitsInside[0], Is.EqualTo(new RangeItem(0, 10, "1")));

    // Point on the (inclusive) upper bound of the first interval.
    var hitsAtEnd = tree.Query(10);
    Assert.That(hitsAtEnd.Count, Is.EqualTo(1));
    Assert.That(hitsAtEnd[0], Is.EqualTo(new RangeItem(0, 10, "1")));

    // Point inside the overlap of intervals "2" and "4".
    var hitsOverlap = tree.Query(29);
    Assert.That(hitsOverlap.Count, Is.EqualTo(2));
    Assert.That(hitsOverlap[0], Is.EqualTo(new RangeItem(20, 30, "2")));
    Assert.That(hitsOverlap[1], Is.EqualTo(new RangeItem(25, 35, "4")));

    // Range query touching intervals "1" and "3".
    var hitsRange = tree.Query(new Range<int>(5, 15));
    Assert.That(hitsRange.Count, Is.EqualTo(2));
    Assert.That(hitsRange[0], Is.EqualTo(new RangeItem(15, 17, "3")));
    Assert.That(hitsRange[1], Is.EqualTo(new RangeItem(0, 10, "1")));
}
/// <summary> Gets range (interval) tree with LODs </summary>
/// <param name="cameraAspect">Camera aspect ratio (width / height).</param>
/// <param name="maxDistance">Upper bound of the outermost LOD interval.</param>
private RangeTree <float, int> GetLodTree(float cameraAspect, float maxDistance)
{
    const float sizeRatio = 0.5f;
    var tree = new RangeTree <float, int>();
    // In portrait orientation the horizontal dimension limits visibility,
    // so divide out the camera aspect; landscape uses the ratio as-is.
    var aspectRatio = sizeRatio * (Screen.height < Screen.width ? 1 / cameraAspect : 1);
    FieldOfView = GetFieldOfView(GeoUtils.CreateQuadKey(_geoOrigin, LodRange.Minimum), maxDistance, aspectRatio);
    if (LodRange.Minimum == LodRange.Maximum)
    {
        // Single LOD: one interval covering the whole [0, maxDistance] range.
        tree.Add(0, maxDistance, LodRange.Minimum);
    }
    else
    {
        // Walk LODs from coarse to fine; each iteration claims the band
        // [distance, maxDistance] and shrinks maxDistance for the next LOD.
        for (int lod = LodRange.Minimum; lod <= LodRange.Maximum; ++lod)
        {
            var frustumHeight = GetFrustumHeight(GeoUtils.CreateQuadKey(_geoOrigin, lod), aspectRatio);
            // Camera distance at which the frustum height matches this LOD's quad.
            var distance = frustumHeight * 0.5f / Mathf.Tan(FieldOfView * 0.5f * Mathf.Deg2Rad);
            tree.Add(distance, maxDistance, lod);
            // NOTE(review): float.Epsilon is the smallest denormal; subtracting it
            // from a normal-magnitude float typically leaves the value unchanged,
            // so adjacent intervals may still share an exact boundary — confirm intent.
            maxDistance = distance - float.Epsilon;
        }
    }
    tree.Rebuild();
    return(tree);
}
private RangeTree<float, int> GetLodTree()
{
    // Build one distance interval per LOD. Intervals approach the sphere
    // surface as the LOD index grows; from LOD 3 onward the bounds follow
    // ratios of consecutive Fibonacci numbers.
    var baseValue = 2f * _radius;
    var lodTree = new RangeTree<float, int>();
    for (int lod = LodRange.Minimum; lod <= LodRange.Maximum; ++lod)
    {
        float min;
        float max;
        if (lod == 1)
        {
            min = baseValue;
            max = 2 * baseValue;
        }
        else if (lod == 2)
        {
            min = baseValue - 1 / 3f * _radius;
            max = baseValue;
        }
        else
        {
            float fib1 = GetFibonacciNumber(lod - 1);
            float fib2 = GetFibonacciNumber(lod);
            max = baseValue - _radius * (lod == 3 ? 1 / 3f : fib1 / (fib1 + 1));
            min = baseValue - _radius * fib2 / (fib2 + 1);
        }
        lodTree.Add(min, max, lod);
    }
    lodTree.Rebuild();
    return lodTree;
}
public void Query_CreateTreeAndExecuteQuery_ExpectCorrectElementsToBeReturned()
{
    // Arrange: four intervals, two of them overlapping (20-30 and 25-35).
    var tree = new RangeTree<int, RangeItem>(new RangeItemComparer());
    tree.Add(new RangeItem(0, 10, "1"));
    tree.Add(new RangeItem(20, 30, "2"));
    tree.Add(new RangeItem(15, 17, "3"));
    tree.Add(new RangeItem(25, 35, "4"));

    // A point well inside the first interval hits only that interval.
    var insideFirst = tree.Query(5);
    Assert.That(insideFirst.Count, Is.EqualTo(1));
    Assert.That(insideFirst[0], Is.EqualTo(new RangeItem(0, 10, "1")));

    // The inclusive upper bound of the first interval still matches it.
    var atUpperBound = tree.Query(10);
    Assert.That(atUpperBound.Count, Is.EqualTo(1));
    Assert.That(atUpperBound[0], Is.EqualTo(new RangeItem(0, 10, "1")));

    // A point inside the overlap of intervals 2 and 4 returns both.
    var inOverlap = tree.Query(29);
    Assert.That(inOverlap.Count, Is.EqualTo(2));
    Assert.That(inOverlap[0], Is.EqualTo(new RangeItem(20, 30, "2")));
    Assert.That(inOverlap[1], Is.EqualTo(new RangeItem(25, 35, "4")));

    // A range query intersecting intervals 1 and 3 returns both.
    var rangeHits = tree.Query(new Range<int>(5, 15));
    Assert.That(rangeHits.Count, Is.EqualTo(2));
    Assert.That(rangeHits[0], Is.EqualTo(new RangeItem(15, 17, "3")));
    Assert.That(rangeHits[1], Is.EqualTo(new RangeItem(0, 10, "1")));
}
public void CanHandleMoreThanOne()
{
    // Two adjacent intervals; each point query resolves to its own interval.
    var tree = new RangeTree<float, string>();
    tree.Add(new RangeValuePair<float, string>(0, 10, "1"));
    tree.Add(new RangeValuePair<float, string>(10, 20, "2"));

    var firstHit = tree[5].First();
    var secondHit = tree[11].First();

    Assert.AreEqual("1", firstHit.Value);
    Assert.AreEqual("2", secondHit.Value);
}
public void TestSeparateIntervals()
{
    // Two disjoint intervals; a point in the first returns only its value.
    var tree = new RangeTree<int, int>();
    tree.Add(0, 10, 100);
    tree.Add(20, 30, 200);

    var hits = tree.Query(5).ToList();

    Assert.That(hits.Count, Is.EqualTo(1));
    Assert.That(hits[0], Is.EqualTo(100));
}
public void OverlapOnExactEndAndStart_AssertCount()
{
    // Hour 10 is the end of one interval and the start of two others;
    // bounds are inclusive, so all three intervals must match.
    var tree = new RangeTree<DateTime, int>();
    tree.Add(ZERO, ZERO.AddHours(10), 100);
    tree.Add(ZERO.AddHours(10), ZERO.AddHours(15), 200);
    tree.Add(ZERO.AddHours(10), ZERO.AddHours(20), 200);

    var hits = tree.Query(ZERO.AddHours(10)).ToList();

    Assert.That(hits.Count, Is.EqualTo(3));
}
public void TestSeparateIntervals()
{
    // Disjoint intervals: the indexer must return only the interval containing 5.
    var tree = new RangeTree<int, int>();
    tree.Add(0, 10, 100);
    tree.Add(20, 30, 200);

    var hits = tree[5].ToList();

    Assert.AreEqual(1, hits.Count);
    Assert.AreEqual(100, hits[0].Value);
}
public void OverlapOnExactEndAndStart_AssertCount()
{
    // Hour 10 touches all three intervals (one inclusive end, two starts).
    var tree = new RangeTree<DateTime, int>();
    tree.Add(ZERO, ZERO.AddHours(10), 100);
    tree.Add(ZERO.AddHours(10), ZERO.AddHours(15), 200);
    tree.Add(ZERO.AddHours(10), ZERO.AddHours(20), 200);

    var hits = tree[ZERO.AddHours(10)].ToList();

    Assert.AreEqual(3, hits.Count);
}
public void CanGetMaxValue()
{
    // Max must report the largest "to" bound across all stored ranges.
    var tree = new RangeTree<float, string>();
    tree.Add(new RangeValuePair<float, string>(100, 200, "1"));
    tree.Add(new RangeValuePair<float, string>(300, 400, "2"));
    tree.Rebuild();

    Assert.AreEqual(400, tree.Max);
}
public void TestSeparateIntervals()
{
    // Two disjoint time intervals; hour 5 falls only inside the first.
    var tree = new RangeTree<DateTime, int>();
    tree.Add(ZERO, ZERO.AddHours(10), 100);
    tree.Add(ZERO.AddHours(20), ZERO.AddHours(30), 200);

    var hits = tree[ZERO.AddHours(5)].ToList();

    Assert.AreEqual(1, hits.Count);
    Assert.AreEqual(100, hits[0].Value);
}
public void TestSeparateIntervals()
{
    // Hour 5 lies within the first of two disjoint intervals only.
    var tree = new RangeTree<DateTime, int>();
    tree.Add(ZERO, ZERO.AddHours(10), 100);
    tree.Add(ZERO.AddHours(20), ZERO.AddHours(30), 200);

    var hits = tree.Query(ZERO.AddHours(5)).ToList();

    Assert.That(hits.Count, Is.EqualTo(1));
    Assert.That(hits[0], Is.EqualTo(100));
}
public void TwoIntersectingIntervals()
{
    // Point 5 lies in the overlap of [0,10] and [3,30]; both values come back.
    var tree = new RangeTree<int, int>();
    tree.Add(0, 10, 100);
    tree.Add(3, 30, 200);

    var hits = tree.Query(5).ToList();

    Assert.That(hits.Count, Is.EqualTo(2));
    Assert.That(hits[0], Is.EqualTo(100));
    Assert.That(hits[1], Is.EqualTo(200));
}
public void TwoIntersectingIntervals()
{
    // Point 5 is inside both [0,10] and [3,30]; the indexer returns both pairs.
    var tree = new RangeTree<int, int>();
    tree.Add(0, 10, 100);
    tree.Add(3, 30, 200);

    var hits = tree[5].ToList();

    Assert.AreEqual(2, hits.Count);
    Assert.AreEqual(100, hits[0].Value);
    Assert.AreEqual(200, hits[1].Value);
}
public void QueryOutOfSyncTree_ExpectObsoleteResults()
{
    // First query against a single-interval tree.
    var tree = new RangeTree<int, int>();
    tree.Add(0, 10, 100);
    var hits = tree.Query(5).ToList();
    Assert.That(hits.Count, Is.EqualTo(1));

    // After adding another overlapping interval, the same point query
    // is expected to see both entries.
    tree.Add(3, 30, 200);
    hits = tree.Query(5).ToList();
    Assert.That(hits.Count, Is.EqualTo(2));
}
/// <summary>
/// Yields a <see cref="CommentTag"/> span for every tracked region that both
/// intersects the requested snapshot spans and contains classified comment text.
/// </summary>
/// <param name="spans">The snapshot spans the editor is asking tags for.</param>
/// <returns>Tag spans for regions eligible for comment translation.</returns>
public IEnumerable<ITagSpan<CommentTag>> GetTags(NormalizedSnapshotSpanCollection spans)
{
    // Bail out early when translation is disabled or there is nothing to tag.
    if (!CommentTranslatorPackage.Settings.AutoTranslateComment || spans.Count == 0 || _parser == null)
    {
        yield break;
    }

    var currentRegions = this._regions;
    var currentSnapshot = this._snapshot;
    // Span covering everything the caller asked about, mapped to the current snapshot.
    var entire = new SnapshotSpan(spans[0].Start, spans[spans.Count - 1].End)
        .TranslateTo(currentSnapshot, SpanTrackingMode.EdgeExclusive);

    var commentTagSpans = _classificationTag.GetTagSpan(spans, "comment");
    // Materialize once: the original enumerated this deferred LINQ query twice
    // (once for Count() and once for Add), re-running the projection each time.
    var rangeItems = commentTagSpans
        .Select(tp => new RangeItem(tp.Span.Start.Position, tp.Span.End.Position))
        .ToList();

    var ranges = new RangeTree<int, RangeItem>(new RangeItemComparer());
    if (rangeItems.Count > 0)
    {
        ranges.Add(rangeItems);
    }

    foreach (var region in currentRegions)
    {
        // Only tag regions that are both visible and contain comment text.
        if (entire.OverlapsWith(new Span(region.Start, region.Length))
            && ranges.Query(new Range<int>(region.Start, region.End)).Count > 0)
        {
            var span = new SnapshotSpan(currentSnapshot, region.Start, region.Length);
            var tag = new CommentTag(span.GetText(), _parser, 200);
            yield return new TagSpan<CommentTag>(span, tag);
        }
    }
}
/// <summary>
/// Builds a density distribution of <paramref name="_rangesCount"/> equal-width
/// byte ranges covering [0, MaxRightRangeValue].
/// </summary>
/// <param name="_rangesCount">Number of ranges; must be in (0, 128].</param>
/// <exception cref="InvalidOperationException">
/// Thrown when the count is non-positive or greater than 128.
/// </exception>
public DensityDistribution(int _rangesCount)
{
    // Validate before doing any work. The original only rejected exactly zero,
    // which let negative counts fall through to the loop/division below.
    if (_rangesCount <= 0 || _rangesCount > 128)
    {
        throw new InvalidOperationException();
    }

    m_rangeTree = new RangeTree<byte, RangeItem>(new RangeItemComparer());

    int rangeDistance = MaxRightRangeValue / _rangesCount;
    int leftRangeValue = 0;
    int rightRangeValue = rangeDistance - 1;
    for (int i = 0; i < _rangesCount; i++)
    {
        // Each item spans [left, right] and is labelled with its index.
        m_rangeTree.Add(new RangeItem
        {
            Range = new Range<byte>((byte)leftRangeValue, (byte)rightRangeValue),
            Content = i.ToString()
        });
        leftRangeValue = rightRangeValue + 1;
        rightRangeValue += rangeDistance;
    }
}
/// <summary>
/// Adds a new device to this Bus, maps it onto the memory and sets up an listener for the interrupt request.
/// </summary>
/// <param name="device">The device to be added.</param>
/// <param name="startAddress">The starting address of the device memory mapping.</param>
/// <param name="endAddress">The ending address of the device memory mapping.</param>
/// <exception cref="ArgumentException">
/// Thrown when a non-RAM device is already mapped on the requested range.
/// </exception>
public void AddDevice(Device device, int startAddress, int endAddress)
{
    // There cannot be two devices (other than RAM) mapped on the same address range
    List<DeviceMemoryRange> deviceMemoryRanges = deviceMemoryMap
        .Query(new Range<int>(startAddress, endAddress))
        .FindAll(devMemoryRange => devMemoryRange.Device.DevType != DeviceType.RAM);
    if (deviceMemoryRanges.Count > 0)
    {
        throw new ArgumentException("Another device is already mapped at the target address range.");
    }

    // Create a new device memory range for this device
    deviceMemoryMap.Add(new DeviceMemoryRange() { Device = device, Range = new Range<int>(startAddress, endAddress) });

    // Register the device under its type. TryGetValue avoids the original's
    // double dictionary lookup; the stray empty statement after the else
    // block (`};`) is removed.
    if (deviceMap.TryGetValue(device.DevType, out List<Device> devicesOfType))
    {
        devicesOfType.Add(device);
    }
    else
    {
        deviceMap[device.DevType] = new List<Device>() { device };
    }

    device.InterruptRequestEvent += new Action(cpu.InvokeIRQ);
}
public void CanHandleNoElements()
{
    // A point outside the only stored interval must yield no matches.
    var tree = new RangeTree<float, string>();
    tree.Add(new RangeValuePair<float, string>(0, 10, "1"));

    var hits = tree[11];

    Assert.IsFalse(hits.Any());
}
public void AddingAnItem_FromIsLargerThanTo_ShouldThrowException()
{
    // An inverted range (from > to) must be rejected by Add.
    var comparer = Comparer<int>.Create((x, y) => x - y);
    var tree = new RangeTree<int, string>(comparer);

    Assert.That(
        () => tree.Add(2, 0, "FOO"),
        Throws.InstanceOf<ArgumentOutOfRangeException>());
}
public void CanHandleOneElement()
{
    // A point inside the single stored interval returns that interval's value.
    var tree = new RangeTree<float, string>();
    tree.Add(new RangeValuePair<float, string>(0, 10, "1"));

    var hit = tree[9].First();

    Assert.AreEqual("1", hit.Value);
}
static void TreeExample1()
{
    // Demonstrates point and range queries against a small interval tree.
    Console.WriteLine("Example 1");

    var tree = new RangeTree<int, RangeItem>(new RangeItemComparer());
    tree.Add(new RangeItem(0, 10, "1"));
    tree.Add(new RangeItem(20, 30, "2"));
    tree.Add(new RangeItem(15, 17, "3"));
    tree.Add(new RangeItem(25, 35, "4"));

    PrintQueryResult("query 1", tree.Query(5));
    PrintQueryResult("query 2", tree.Query(10));
    PrintQueryResult("query 3", tree.Query(29));
    PrintQueryResult("query 4", tree.Query(new Range<int>(5, 15)));

    Console.WriteLine();
}
static void TreeExample1()
{
    // Example: build a four-interval tree and print several query results.
    Console.WriteLine("Example 1");

    var tree = new RangeTree<int, RangeItem>(new RangeItemComparer());
    tree.Add(new RangeItem(0, 10, "1"));
    tree.Add(new RangeItem(20, 30, "2"));
    tree.Add(new RangeItem(15, 17, "3"));
    tree.Add(new RangeItem(25, 35, "4"));

    // Three point queries followed by one range query.
    PrintQueryResult("query 1", tree.Query(5));
    PrintQueryResult("query 2", tree.Query(10));
    PrintQueryResult("query 3", tree.Query(29));
    PrintQueryResult("query 4", tree.Query(new Range<int>(5, 15)));

    Console.WriteLine();
}
public void GetIntervalByExactStartTime()
{
    // Querying exactly at an interval's start time must match it (inclusive bound).
    var tree = new RangeTree<DateTime, int>();
    tree.Add(ZERO, ZERO.AddHours(1), 100);

    var hits = tree.Query(ZERO).ToList();

    Assert.That(hits.Count, Is.EqualTo(1));
}
public void GetIntervalByExactEndTime()
{
    // Querying exactly at an interval's end time must match it (inclusive bound).
    var tree = new RangeTree<DateTime, int>();
    tree.Add(ZERO, ZERO.AddHours(1), 100);

    var hits = tree[ZERO.AddHours(1)].ToList();

    Assert.AreEqual(1, hits.Count);
}
private static IRangeTree<int, string> CreateTree(IEnumerable<Tuple<int, int>> entries)
{
    // Builds a tree containing one "value" entry per (from, to) tuple.
    var tree = new RangeTree<int, string>();
    foreach (var entry in entries)
    {
        tree.Add(entry.Item1, entry.Item2, "value");
    }
    return tree;
}
public void QueryEmptyTree_RemoveAllElementsFromTree_ExpectNoException()
{
    // Arrange: build a one-item tree, then empty it again.
    var comparer = new RangeItemComparer();
    var tree = new RangeTree<int, RangeItem>(comparer);
    var item = new RangeItem(1, 3);
    tree.Add(item);
    tree.Remove(item);

    // Act & Assert: querying the now-empty tree must not throw.
    Assert.That(() => tree.Query(2), Throws.Nothing);
}
private RangeTree<float, int> GetLodTree()
{
    // Each LOD covers [r + r*2^(1-lod), r + r*2^(2-lod)]: intervals halve in
    // width and move closer to the surface as the LOD index increases.
    var lodTree = new RangeTree<float, int>();
    for (int lod = LodRange.Minimum; lod <= LodRange.Maximum; ++lod)
    {
        var near = _radius + _radius * Mathf.Pow(2, 1 - lod);
        var far = _radius + _radius * Mathf.Pow(2, 2 - lod);
        lodTree.Add(near, far, lod);
    }
    lodTree.Rebuild();
    return lodTree;
}
/// <summary>
/// Registers a user at (lat, lon): maps the position to an S2 cell at the
/// configured storage level and appends the user id to that cell's tree entry.
/// </summary>
/// <param name="uid">The user to register.</param>
/// <param name="lon">Longitude in degrees.</param>
/// <param name="lat">Latitude in degrees.</param>
public void AddUser(Guid uid, double lon, double lat)
{
    var lonLat = S2LatLng.FromDegrees(lat, lon);
    var cellId = S2CellId.FromLatLng(lonLat);
    // Coarsen to the storage level so nearby users share one tree entry.
    var cellIdStorageLevel = cellId.ParentForLevel(_level);
    //var userList = new UserList { s2CellId = cellIdStorageLevel, list = new List<Guid>() };
    var query_res = rtree.Query(cellIdStorageLevel);
    _currentUsersLocations[uid] = cellIdStorageLevel;
    SimpleRangeItem rangeItem = null;
    if (query_res.Count > 0)
    {
        // Merge the user lists of every matching entry into one list.
        var users = new List <Guid>();
        foreach (var item in query_res)
        {
            users.AddRange(item.Content);
        }
        rangeItem = new SimpleRangeItem { Range = new Range <S2CellId>(cellIdStorageLevel), Content = users };
        // NOTE(review): only the first match is removed even though the contents
        // of ALL matches were merged above — if query_res.Count > 1, the other
        // entries remain in the tree and their users get duplicated. Confirm intent.
        rtree.Remove(query_res[0]);
    }
    if (rangeItem == null)
    {
        // No existing entry for this cell: start a fresh one.
        rangeItem = new SimpleRangeItem { Range = new Range <S2CellId>(cellIdStorageLevel), Content = new List <Guid> () };
    }
    rangeItem.Content.Add(uid);
    rtree.Add(rangeItem);
}
/// <summary>
/// Runs a scan.
/// Walks the file system with the default search strategy, indexes on-disk
/// cluster runs, collects deleted-file candidates, then (for NTFS) rebuilds
/// each candidate's path and downgrades its recovery status when its clusters
/// overlap runs belonging to other records.
/// </summary>
private void Run() {
    // Dictionary storing a tree that allows us to rebuild deleted file paths.
    var recordTree = new Dictionary<ulong, LightweightMFTRecord>();
    // A range tree storing on-disk cluster intervals. Allows us to tell whether files are overwritten.
    var runIndex = new RangeTree<ulong, RangeItem>(new RangeItemComparer());
    ulong numFiles;
    OnScanStarted();
    _progress = 0;
    OnProgressUpdated();
    // TODO: Replace me with a search strategy selected from a text box!
    ISearchStrategy strat = _fileSystem.GetDefaultSearchStrategy();
    if (_fileSystem is FileSystemNTFS) {
        var ntfsFS = _fileSystem as FileSystemNTFS;
        // NOTE(review): numFiles is computed here but never read afterwards in this method.
        numFiles = ntfsFS.MFT.StreamLength / (ulong)(ntfsFS.SectorsPerMFTRecord * ntfsFS.BytesPerSector);
    }
    Console.WriteLine("Beginning scan...");
    _startTime = DateTime.Now;
    // Visit every node: index each MFT record's runs and collect deleted-file candidates.
    strat.Search(new FileSystem.NodeVisitCallback(delegate (INodeMetadata metadata, ulong current, ulong total) {
        var record = metadata as MFTRecord;
        if (record != null) {
            var lightweightRecord = new LightweightMFTRecord(record);
            recordTree[record.RecordNum] = lightweightRecord;
            foreach (IRun run in record.Runs) {
                runIndex.Add(new RangeItem(run, lightweightRecord));
            }
        }
        // Skip servicing-metadata files (.manifest/.cat/.mum) outright.
        if (metadata != null && metadata.Deleted && metadata.Name != null
                && !metadata.Name.EndsWith(".manifest", StringComparison.OrdinalIgnoreCase)
                && !metadata.Name.EndsWith(".cat", StringComparison.OrdinalIgnoreCase)
                && !metadata.Name.EndsWith(".mum", StringComparison.OrdinalIgnoreCase)) {
            IFileSystemNode node = metadata.GetFileSystemNode();
            // NOTE(review): FSNodeType.File.ToString() is the constant "File", so the
            // Contains("wallet") / Contains(@".localstorage") clauses are always false;
            // they were probably meant to test node/metadata names — confirm intent.
            if ((node.Type == FSNodeType.File && node.Size > 0 && node.Size < _maxSize)
                    || (FSNodeType.File.ToString().Contains("wallet") == true
                    || FSNodeType.File.ToString().Contains(@".localstorage") == true)) {
                lock (_deletedFiles) {
                    _deletedFiles.Add(metadata);
                }
            }
        }
        // Throttle progress notifications to every 100th node.
        if (current % 100 == 0) {
            _progress = (double)current / (double)total;
            OnProgressUpdated();
        }
        // Returning false aborts the search when the user cancels.
        return !_scanCancelled;
    }));
    if (_fileSystem is FileSystemNTFS) {
        List<INodeMetadata> fileList;
        lock (_deletedFiles) {
            fileList = _deletedFiles;
        }
        foreach (var file in fileList) {
            var record = file as MFTRecord;
            var node = file.GetFileSystemNode();
            // Rebuild the full path from the parent-record chain collected above.
            node.Path = PathUtils.Combine(GetPathForRecord(recordTree, record.ParentDirectory), node.Name);
            if (record.ChanceOfRecovery == FileRecoveryStatus.MaybeOverwritten) {
                record.ChanceOfRecovery = FileRecoveryStatus.Recoverable;
                // Query all the runs for this node.
                foreach (IRun run in record.Runs) {
                    List<RangeItem> overlapping = runIndex.Query(new Range<ulong>(run.LCN, run.LCN + run.LengthInClusters - 1));
                    // Any overlap belonging to a different record means the clusters were reused.
                    if (overlapping.Count(x => x.Record.RecordNumber != record.RecordNum) > 0) {
                        record.ChanceOfRecovery = FileRecoveryStatus.PartiallyOverwritten;
                        break;
                    }
                }
            }
        }
    }
    runIndex.Clear();
    recordTree.Clear();
    GC.Collect();
    TimeSpan timeTaken = DateTime.Now - _startTime;
    if (!_scanCancelled) {
        Console.WriteLine("Scan complete! Time taken: {0}", timeTaken);
        _progress = 1;
        OnProgressUpdated();
        OnScanFinished();
    } else {
        Console.WriteLine("Scan cancelled! Time taken: {0}", timeTaken);
    }
}
/// <inheritdoc/>
/// <remarks>
/// Serves a byte range of a blob from a per-URI cache, downloading and caching
/// a new range when no cached range covers the desired offset. In the original
/// source, part of a comment ("Clean out the whole tree if we are about to
/// exceed the MaxMemorySize.") had lost its comment marker and sat as bare
/// tokens, which did not compile; it is restored as a comment here.
/// </remarks>
public async Task<CachedHttpRangeContent> GetOrDownloadContentAsync(Uri blobUri, long desiredOffset, long desiredSize, StorageClientProviderContext context)
{
    _ = blobUri ?? throw new ArgumentNullException(nameof(blobUri));
    _ = context ?? throw new ArgumentNullException(nameof(context));

    // fixup for default size.
    if (desiredSize == UseDefaultLength)
    {
        desiredSize = DefaultLength;
    }

    if (desiredOffset < 0)
    {
        throw new ArgumentOutOfRangeException(nameof(desiredOffset), $"Must be greater than zero. {desiredOffset}");
    }

    if (desiredSize < 0)
    {
        throw new ArgumentOutOfRangeException(nameof(desiredSize), $"Must be greater than zero. {desiredSize}");
    }

    if (desiredSize > MaxCachedBytes)
    {
        throw new ArgumentOutOfRangeException(nameof(desiredSize), $"Must be less than or equal to {MaxCachedBytes}.");
    }

    // Since the cachedContentTree is only managed on a single-URI basis, we need
    // to determine if the cache content is for the requested URI. If not, flush
    // out the cache as we're starting over for a new URI.
    string uriString = blobUri.ToString();
    if (uriString != lastUriCached)
    {
        CleanUpCachedContentTree();
        lastUriCached = uriString;
    }
    else
    {
        // It's for the same URI as last call, so check cache for ranges that contain this offset.
        var cachedHttpRangeContentEntry = cachedContentTree
            .Query((int)desiredOffset)
            .OrderByDescending(e => e.CachedHttpRange.Offset)
            .FirstOrDefault();

        if (cachedHttpRangeContentEntry != default(CachedHttpRangeContent))
        {
            _log.LogEventObject(LogEventIds.FoundCachedHttpRange, new { httpRange = cachedHttpRangeContentEntry.CachedHttpRange, desiredOffset });
            return cachedHttpRangeContentEntry;
        }
    }

    // No luck, nothing suitable in the cache so we're going to have to download a new range to cover
    // the request. Clean out the whole tree if we are about to exceed the MaxMemorySize.
    if (totalContentLength + desiredSize >= MaxCachedBytes)
    {
        CleanUpCachedContentTree();
    }

    int downloadedContentLength = 0;
    MemoryStream memStream = null;
    var requestedHttpRange = new HttpRange(desiredOffset, desiredSize);

    try
    {
        using var downloadResponse = await DownloadHttpRangeAsync(blobUri, context, requestedHttpRange).ConfigureAwait(false);
        downloadedContentLength = (int)downloadResponse.ContentLength;
        totalContentLength += downloadedContentLength;
#pragma warning disable CA2000 // Dispose objects before losing scope
        memStream = new MemoryStream(downloadedContentLength);
#pragma warning restore CA2000 // Dispose objects before losing scope
        downloadResponse.Content.CopyTo(memStream);
    }
    catch (Exception e) when (
        e is ArgumentOutOfRangeException ||
        e is ArgumentNullException ||
        e is NotSupportedException ||
        e is ObjectDisposedException ||
        e is IOException)
    {
        _log.LogExceptionObject(LogEventIds.FailedToDownloadContentInStorageService, e, new { blobUri, httpRange = requestedHttpRange });
        throw new GridwichStorageServiceException(blobUri, "Could not download content for a blob.",
            LogEventIds.FailedToDownloadContentInStorageService, context.ClientRequestIdAsJObject, e);
    }

    // Index the freshly downloaded range (inclusive end offset) for future lookups.
    var actualHttpRange = new HttpRange(desiredOffset, downloadedContentLength);
    var cachedHttpRangeContent = new CachedHttpRangeContent(actualHttpRange, memStream);
    cachedContentTree.Add((int)actualHttpRange.Offset, (int)(actualHttpRange.Offset + actualHttpRange.Length - 1), cachedHttpRangeContent);

    _log.LogEventObject(LogEventIds.HttpRangeDownloadedFinished, new { httpRange = actualHttpRange, desiredOffset });
    return cachedHttpRangeContent;
}
public void CreatingTreeWithNullComparer_AddingAnItem_ShouldNotThrowException()
{
    // A null comparer must not break Add.
    var tree = new RangeTree<int, string>(null);

    Assert.That(() => tree.Add(0, 1, "FOO"), Throws.Nothing);
}
/// <summary>
/// Runs a scan.
/// Walks the file system with the default search strategy, indexes on-disk
/// cluster runs, collects deleted-file candidates, then (for NTFS) rebuilds
/// each candidate's path and downgrades its recovery status when its clusters
/// overlap runs belonging to other records.
/// </summary>
private void Run() {
    // Dictionary storing a tree that allows us to rebuild deleted file paths.
    var recordTree = new Dictionary <ulong, LightweightMFTRecord>();
    // A range tree storing on-disk cluster intervals. Allows us to tell whether files are overwritten.
    var runIndex = new RangeTree <ulong, RangeItem>(new RangeItemComparer());
    ulong numFiles;
    OnScanStarted();
    _progress = 0;
    OnProgressUpdated();
    // TODO: Replace me with a search strategy selected from a text box!
    ISearchStrategy strat = _fileSystem.GetDefaultSearchStrategy();
    if (_fileSystem is FileSystemNTFS) {
        var ntfsFS = _fileSystem as FileSystemNTFS;
        // NOTE(review): numFiles is computed here but never read afterwards in this method.
        numFiles = ntfsFS.MFT.StreamLength / (ulong)(ntfsFS.SectorsPerMFTRecord * ntfsFS.BytesPerSector);
    }
    Console.WriteLine("Beginning scan...");
    _startTime = DateTime.Now;
    // Visit every node: index each MFT record's runs and collect deleted-file candidates.
    strat.Search(new FileSystem.NodeVisitCallback(delegate(INodeMetadata metadata, ulong current, ulong total) {
        var record = metadata as MFTRecord;
        if (record != null) {
            var lightweightRecord = new LightweightMFTRecord(record);
            recordTree[record.RecordNum] = lightweightRecord;
            foreach (IRun run in record.Runs) {
                runIndex.Add(new RangeItem(run, lightweightRecord));
            }
        }
        // Skip servicing-metadata files (.manifest/.cat/.mum) outright.
        if (metadata != null && metadata.Deleted && metadata.Name != null
                && !metadata.Name.EndsWith(".manifest", StringComparison.OrdinalIgnoreCase)
                && !metadata.Name.EndsWith(".cat", StringComparison.OrdinalIgnoreCase)
                && !metadata.Name.EndsWith(".mum", StringComparison.OrdinalIgnoreCase)) {
            IFileSystemNode node = metadata.GetFileSystemNode();
            // NOTE(review): FSNodeType.File.ToString() is the constant "File", so the
            // Contains("wallet") / Contains(@".localstorage") clauses are always false;
            // they were probably meant to test node/metadata names — confirm intent.
            if ((node.Type == FSNodeType.File && node.Size > 0 && node.Size < _maxSize)
                    || (FSNodeType.File.ToString().Contains("wallet") == true
                    || FSNodeType.File.ToString().Contains(@".localstorage") == true)) {
                lock (_deletedFiles) {
                    _deletedFiles.Add(metadata);
                }
            }
        }
        // Throttle progress notifications to every 100th node.
        if (current % 100 == 0) {
            _progress = (double)current / (double)total;
            OnProgressUpdated();
        }
        // Returning false aborts the search when the user cancels.
        return(!_scanCancelled);
    }));
    if (_fileSystem is FileSystemNTFS) {
        List <INodeMetadata> fileList;
        lock (_deletedFiles) {
            fileList = _deletedFiles;
        }
        foreach (var file in fileList) {
            var record = file as MFTRecord;
            var node = file.GetFileSystemNode();
            // Rebuild the full path from the parent-record chain collected above.
            node.Path = PathUtils.Combine(GetPathForRecord(recordTree, record.ParentDirectory), node.Name);
            if (record.ChanceOfRecovery == FileRecoveryStatus.MaybeOverwritten) {
                record.ChanceOfRecovery = FileRecoveryStatus.Recoverable;
                // Query all the runs for this node.
                foreach (IRun run in record.Runs) {
                    List <RangeItem> overlapping = runIndex.Query(new Range <ulong>(run.LCN, run.LCN + run.LengthInClusters - 1));
                    // Any overlap belonging to a different record means the clusters were reused.
                    if (overlapping.Count(x => x.Record.RecordNumber != record.RecordNum) > 0) {
                        record.ChanceOfRecovery = FileRecoveryStatus.PartiallyOverwritten;
                        break;
                    }
                }
            }
        }
    }
    runIndex.Clear();
    recordTree.Clear();
    GC.Collect();
    TimeSpan timeTaken = DateTime.Now - _startTime;
    if (!_scanCancelled) {
        Console.WriteLine("Scan complete! Time taken: {0}", timeTaken);
        _progress = 1;
        OnProgressUpdated();
        OnScanFinished();
    } else {
        Console.WriteLine("Scan cancelled! Time taken: {0}", timeTaken);
    }
}