private void PrepareIndexUsingPathNameSource(out Dictionary<string, byte[]> childIndex)
{
	// 400 is an estimated bytes-per-record figure, not a record count; see the
	// comment in PrepareIndicesUsingPathNameSource below for where it comes from.
	const int estimatedObjectCount = 400;
	var fileInfo = new FileInfo(_childPathname);
	childIndex = new Dictionary<string, byte[]>((int)(fileInfo.Length / estimatedObjectCount), StringComparer.OrdinalIgnoreCase);
	using (var prepper = new MakeRecordDictionary(childIndex, _childPathname, _firstElementTag, _startTag, _identfierAttribute))
	{
		// Report duplicate keys as merge warnings, but keep indexing.
		prepper.ShouldContinueAfterDuplicateKey = s =>
		{
			_eventListener.WarningOccurred(new MergeWarning(_childPathname + ": " + s));
			return true;
		};
		prepper.Run();
	}
}
private void PrepareIndicesUsingPathNameSource(string parentPathname, out Dictionary<string, byte[]> parentIndex, string childPathname, out Dictionary<string, byte[]> childIndex)
{
	// This arbitrary length (400) is based on two large databases,
	// one 360M with 474 bytes/object, and one 180M with 541.
	// It's probably not perfect, but we're mainly trying to prevent
	// fragmenting the large object heap by growing it MANY times.
	const int estimatedObjectCount = 400;
	var fileInfo = new FileInfo(parentPathname);
	parentIndex = new Dictionary<string, byte[]>((int)(fileInfo.Length / estimatedObjectCount), StringComparer.OrdinalIgnoreCase);
	using (var prepper = new MakeRecordDictionary(parentIndex, parentPathname, _firstElementTag, _startTag, _identfierAttribute))
	{
		prepper.ShouldContinueAfterDuplicateKey = s =>
		{
			_eventListener.WarningOccurred(new MergeWarning(parentPathname + ": " + s));
			return true;
		};
		prepper.Run();
	}
	fileInfo = new FileInfo(childPathname);
	childIndex = new Dictionary<string, byte[]>((int)(fileInfo.Length / estimatedObjectCount), StringComparer.OrdinalIgnoreCase);
	using (var prepper = new MakeRecordDictionary(childIndex, childPathname, _firstElementTag, _startTag, _identfierAttribute))
	{
		prepper.ShouldContinueAfterDuplicateKey = s =>
		{
			_eventListener.WarningOccurred(new MergeWarning(childPathname + ": " + s));
			return true;
		};
		prepper.Run();
	}
}
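// Editorial sketch, not part of the original code: the two methods above build
// each index with identical logic, so that logic could live in one shared
// helper. This assumes the MakeRecordDictionary constructor and the
// ShouldContinueAfterDuplicateKey callback exactly as they are used above;
// the helper name PrepareIndex is hypothetical.
private void PrepareIndex(string pathname, out Dictionary<string, byte[]> index)
{
	// Estimated bytes per record (see the comment in
	// PrepareIndicesUsingPathNameSource); presizing the dictionary from the
	// file length avoids growing the large object heap many times.
	const int estimatedObjectCount = 400;
	var fileInfo = new FileInfo(pathname);
	index = new Dictionary<string, byte[]>((int)(fileInfo.Length / estimatedObjectCount), StringComparer.OrdinalIgnoreCase);
	using (var prepper = new MakeRecordDictionary(index, pathname, _firstElementTag, _startTag, _identfierAttribute))
	{
		// Report duplicate keys as merge warnings, but keep indexing.
		prepper.ShouldContinueAfterDuplicateKey = s =>
		{
			_eventListener.WarningOccurred(new MergeWarning(pathname + ": " + s));
			return true;
		};
		prepper.Run();
	}
}
// With such a helper, PrepareIndexUsingPathNameSource would reduce to
// PrepareIndex(_childPathname, out childIndex), and
// PrepareIndicesUsingPathNameSource to two such calls.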