/// <summary>Releases the primary tree and the transition tree, if they were created.</summary>
public void Dispose()
{
    // Null-conditional invocation is equivalent to the explicit null guards:
    // each tree is disposed only when it was actually constructed.
    _tree?.Dispose();
    _transitionTree?.Dispose();
}
/// <summary>
/// Verifies transaction-log recovery when the tree's backing file was never written.
/// Inserts <paramref name="count"/> items while holding the tree's private storage
/// open (so nothing is flushed to the data file), appends <paramref name="added"/>
/// extra records directly to the log, then reopens the tree and asserts every
/// record is recovered from the log alone — and that the file now exists.
/// </summary>
void TestRecoveryOnNew(BPlusTree<Guid, TestInfo>.OptionsV2 options, int count, int added)
{
    BPlusTree<Guid, TestInfo> tree = null;
    var temp = TempFile.Attach(options.FileName);
    Dictionary<Guid, TestInfo> data = new Dictionary<Guid, TestInfo>();
    try
    {
        Assert.IsNotNull(options.TransactionLog);
        temp.Delete();
        tree = new BPlusTree<Guid, TestInfo>(options);
        using (var log = options.TransactionLog)
        {
            // Reflection hack: dispose the tree's private "_storage" after the
            // inserts so the tree cannot commit anything to the data file.
            using ((IDisposable)new PropertyValue(tree, "_storage").Value)
                Insert(tree, data, Environment.ProcessorCount, count, TimeSpan.MaxValue);
            //Add extra data...
            AppendToLog(log, TestInfo.Create(added, data));
        }
        tree = null;
        //No file... yet...
        Assert.IsFalse(File.Exists(options.FileName));
        //Now recover...
        using (var recovered = new BPlusTree<Guid, TestInfo>(options))
        {
            TestInfo.AssertEquals(data, recovered);
        }
        // Recovery replays the log and materializes the data file.
        Assert.IsTrue(File.Exists(options.FileName));
    }
    finally
    {
        temp.Dispose();
        if (tree != null)
            tree.Dispose();
    }
}
/// <summary>
/// Verifies transaction-log recovery for a tree whose data file already exists.
/// Phase 1 commits 100 items to the file. Phase 2 inserts more items with the
/// private storage held open (so the data file is untouched) and appends extra
/// records straight to the log; reopening must replay the log on top of the
/// original file contents.
/// </summary>
void TestRecoveryOnExisting(BPlusTree<Guid, TestInfo>.OptionsV2 options, int count, int added)
{
    BPlusTree<Guid, TestInfo> tree = null;
    var temp = TempFile.Attach(options.FileName);
    Dictionary<Guid, TestInfo> dataFirst, data = new Dictionary<Guid, TestInfo>();
    try
    {
        temp.Delete();
        Assert.IsNotNull(options.TransactionLog);
        using (tree = new BPlusTree<Guid, TestInfo>(options))
        {
            Insert(tree, data, 1, 100, TimeSpan.MaxValue);
            TestInfo.AssertEquals(data, tree);
            // File is not written until the tree is closed/committed.
            Assert.IsFalse(temp.Exists);
        }
        tree = null;
        Assert.IsTrue(File.Exists(options.TransactionLogFileName));
        // All data commits to output file
        Assert.IsTrue(temp.Exists);
        TestInfo.AssertEquals(data, BPlusTree<Guid, TestInfo>.EnumerateFile(options));
        // Snapshot the committed state so we can prove phase 2 never touches the file.
        dataFirst = new Dictionary<Guid, TestInfo>(data);
        DateTime modified = temp.Info.LastWriteTimeUtc;
        tree = new BPlusTree<Guid, TestInfo>(options);
        using (var log = options.TransactionLog)
        {
            // Dispose the tree's private "_storage" (via reflection) so the new
            // inserts reach the transaction log but never the data file.
            using ((IDisposable)new PropertyValue(tree, "_storage").Value)
                Insert(tree, data, Environment.ProcessorCount, count, TimeSpan.MaxValue);
            //Add extra data...
            AppendToLog(log, TestInfo.Create(added, data));
        }
        tree = null;
        //Still only contains original data
        Assert.AreEqual(modified, temp.Info.LastWriteTimeUtc);
        TestInfo.AssertEquals(dataFirst, BPlusTree<Guid, TestInfo>.EnumerateFile(options));
        //Now recover...
        using (var recovered = new BPlusTree<Guid, TestInfo>(options))
        {
            TestInfo.AssertEquals(data, recovered);
        }
    }
    finally
    {
        temp.Dispose();
        if (tree != null)
        {
            tree.Dispose();
        }
    }
}
/// <summary>
/// Verifies transaction-log recovery when the data file exists but holds no
/// committed records. Inserts items while the private storage is held open,
/// appends extras to the log, proves a log-less copy of the file is empty, and
/// then reopens with the log to confirm every record is recovered.
/// </summary>
void TestRecoveryOnNew(BPlusTree<Guid, TestInfo>.OptionsV2 options, int count, int added)
{
    BPlusTree<Guid, TestInfo> tree = null;
    var temp = TempFile.Attach(options.FileName);
    Dictionary<Guid, TestInfo> data = new Dictionary<Guid, TestInfo>();
    try
    {
        Assert.IsNotNull(options.TransactionLog);
        temp.Delete();
        tree = new BPlusTree<Guid, TestInfo>(options);
        using (var log = options.TransactionLog)
        {
            // Dispose the private "_storage" (via reflection) after inserting so
            // nothing commits to the data file; records live only in the log.
            using ((IDisposable)new PropertyValue(tree, "_storage").Value)
                Insert(tree, data, Environment.ProcessorCount, count, TimeSpan.MaxValue);
            //Add extra data...
            AppendToLog(log, TestInfo.Create(added, data));
        }
        tree = null;
        //No data... yet...
        using (TempFile testempty = TempFile.FromCopy(options.FileName))
        {
            // A copy opened WITHOUT the transaction log must appear empty.
            var testoptions = options.Clone();
            testoptions.TransactionLogFileName = null;
            testoptions.TransactionLog = null;
            testoptions.FileName = testempty.TempPath;
            using (var empty = new BPlusTree<Guid, TestInfo>(testoptions))
            {
                empty.EnableCount();
                Assert.AreEqual(0, empty.Count);
            }
        }
        //Now recover...
        using (var recovered = new BPlusTree<Guid, TestInfo>(options))
        {
            TestInfo.AssertEquals(data, recovered);
        }
    }
    finally
    {
        temp.Dispose();
        if (tree != null)
        {
            tree.Dispose();
        }
    }
}
/// <summary>
/// Starts one writer per processor against a fresh tree, waits until at least
/// 1000 records exist, optionally snapshots the data file to <paramref name="copy"/>,
/// hard-aborts the workers, and then force-closes the underlying file WITHOUT
/// disposing the tree — simulating a crash so recovery can be tested.
/// Returns the record count observed just before the abort.
/// </summary>
int StartAndAbortWriters(BPlusTreeOptions<KeyInfo, DataValue> options, TempFile copy)
{
    RecordsCreated = 0;
    int minRecordCreated;
    BPlusTree<KeyInfo, DataValue> dictionary = new BPlusTree<KeyInfo, DataValue>(options);
    try
    {
        using (WorkQueue work = new WorkQueue(Environment.ProcessorCount))
        {
            Exception lastError = null;
            work.OnError += delegate(object o, ErrorEventArgs e) { lastError = e.GetException(); };
            Thread.Sleep(1);
            for (int i = 0; i < Environment.ProcessorCount; i++)
            {
                work.Enqueue(new ThreadedTest(dictionary, 10000000).Run);
            }
            // Busy-wait until the writers have produced enough records.
            while (RecordsCreated < 1000)
            {
                Thread.Sleep(1);
            }
            // Atomic read of the shared counter without modifying it.
            minRecordCreated = Interlocked.CompareExchange(ref RecordsCreated, 0, 0);
            if (copy != null)
            {
                File.Copy(options.FileName, copy.TempPath); //just grab a copy any old time.
            }
            work.Complete(false, 0); //hard-abort all threads
            //if(lastError != null)
            //    Assert.AreEqual(typeof(InvalidDataException), lastError.GetType());
        }
        // force the file to close without disposing the btree
        IDisposable tmp = (IDisposable)new PropertyValue(dictionary, "_storage").Value;
        tmp.Dispose();
    }
    catch
    {
        // Only dispose on failure; the success path deliberately leaves the
        // tree undisposed so a later open behaves as after a crash.
        dictionary.Dispose();
        throw;
    }
    return (minRecordCreated);
}
/// <summary>
/// Disposes the local database instance under the write lock, if one exists.
/// Safe to call when no local database has been created.
/// </summary>
public void ReleaseLocalDb()
{
    var lockInfo = new WriteLockInfo();
    try
    {
        Lock(lockInfo);
        if (_localDb != null)
        {
            _localDb.Dispose();
            _localDb = null;
        }
    }
    finally
    {
        // Always release the lock, even if Dispose throws.
        Release(lockInfo);
    }
}
/// <summary>
/// Writes three term/posting entries to a new BPlusTree file, then reopens the
/// same file read-only and verifies lookups.
/// Fix: both trees are now wrapped in using blocks so the file handles are
/// released even when an assertion throws (previously a failed assert leaked
/// the read-side tree and left the file locked for subsequent tests); the
/// duplicated file-path expression is computed once.
/// </summary>
public void Can_lookup()
{
    // Both phases operate on the same backing file.
    var fileName = Path.Combine(Setup.Dir, string.Format("{0}-{1}.{2}", "Can_lookup", "db", "bpt"));

    var writeOptions = new BPlusTree<Term, DocumentPosting[]>.OptionsV2(
        new TermSerializer(), new ArraySerializer<DocumentPosting>(new PostingSerializer()));
    writeOptions.FileName = fileName;
    writeOptions.CreateFile = CreatePolicy.Always;

    using (var write = new BPlusTree<Term, DocumentPosting[]>(writeOptions))
    {
        write.Add(new Term("title", new Word("bad")), new[] { new DocumentPosting(1, 1) });
        write.Add(new Term("title", new Word("blood")), new[] { new DocumentPosting(1, 1) });
        write.Add(new Term("description", new Word("ape")), new[] { new DocumentPosting(1, 1) });
    }

    var readOptions = new BPlusTree<Term, DocumentPosting[]>.OptionsV2(
        new TermSerializer(), new ArraySerializer<DocumentPosting>(new PostingSerializer()));
    readOptions.FileName = fileName;
    readOptions.ReadOnly = true;
    readOptions.LockingFactory = new IgnoreLockFactory();

    using (var read = new BPlusTree<Term, DocumentPosting[]>(readOptions))
    {
        Assert.IsTrue(read.ContainsKey(new Term("title", new Word("bad"))));
        Assert.IsTrue(read.ContainsKey(new Term("title", new Word("blood"))));
        Assert.IsFalse(read.ContainsKey(new Term("description", new Word("blood"))));
        Assert.IsTrue(read.ContainsKey(new Term("description", new Word("ape"))));

        var postings = read[new Term("title", new Word("bad"))];
        Assert.That(postings[0].DocumentId, Is.EqualTo(1));
    }
}
/// <summary>
/// Commits pending changes and then releases the tree.
/// NOTE(review): Commit inside Dispose can throw, and throwing from Dispose is
/// discouraged — consider committing explicitly before disposal. Also assumes
/// <c>tree</c> was constructed (no null guard) — confirm against callers.
/// </summary>
public void Dispose() { tree.Commit(); tree.Dispose(); }
/// <summary>Releases the underlying B+ tree when one has been created.</summary>
public void Dispose()
{
    // Guard-clause form of the original null-conditional call; "is null"
    // avoids any user-defined == operator, matching ?. semantics exactly.
    if (bTree is null)
    {
        return;
    }
    bTree.Dispose();
}
/// <summary>
/// Verifies transaction-log recovery for a tree whose data file already exists.
/// Phase 1 commits 100 items to the file. Phase 2 inserts more items with the
/// private storage held open (so the data file is untouched) and appends extra
/// records straight to the log; reopening must replay the log on top of the
/// original file contents.
/// </summary>
void TestRecoveryOnExisting(BPlusTree<Guid, TestInfo>.OptionsV2 options, int count, int added)
{
    BPlusTree<Guid, TestInfo> tree = null;
    var temp = TempFile.Attach(options.FileName);
    Dictionary<Guid, TestInfo> dataFirst, data = new Dictionary<Guid, TestInfo>();
    try
    {
        temp.Delete();
        Assert.IsNotNull(options.TransactionLog);
        using (tree = new BPlusTree<Guid, TestInfo>(options))
        {
            Insert(tree, data, 1, 100, TimeSpan.MaxValue);
            TestInfo.AssertEquals(data, tree);
            // File is not written until the tree is closed/committed.
            Assert.IsFalse(temp.Exists);
        }
        tree = null;
        Assert.IsTrue(File.Exists(options.TransactionLogFileName));
        // All data commits to output file
        Assert.IsTrue(temp.Exists);
        TestInfo.AssertEquals(data, BPlusTree<Guid, TestInfo>.EnumerateFile(options));
        // Snapshot the committed state so we can prove phase 2 never touches the file.
        dataFirst = new Dictionary<Guid, TestInfo>(data);
        DateTime modified = temp.Info.LastWriteTimeUtc;
        tree = new BPlusTree<Guid, TestInfo>(options);
        using (var log = options.TransactionLog)
        {
            // Dispose the tree's private "_storage" (via reflection) so the new
            // inserts reach the transaction log but never the data file.
            using ((IDisposable) new PropertyValue(tree, "_storage").Value)
                Insert(tree, data, Environment.ProcessorCount, count, TimeSpan.MaxValue);
            //Add extra data...
            AppendToLog(log, TestInfo.Create(added, data));
        }
        tree = null;
        //Still only contains original data
        Assert.AreEqual(modified, temp.Info.LastWriteTimeUtc);
        TestInfo.AssertEquals(dataFirst, BPlusTree<Guid, TestInfo>.EnumerateFile(options));
        //Now recover...
        using (var recovered = new BPlusTree<Guid, TestInfo>(options))
        {
            TestInfo.AssertEquals(data, recovered);
        }
    }
    finally
    {
        temp.Dispose();
        if (tree != null)
            tree.Dispose();
    }
}
/// <summary>Releases the posting database.</summary>
public void Dispose() => _postingDb.Dispose();
/// <summary>
/// Releases the metadata accessor, the block memory-mapped file, and the index.
/// Fix: each disposal now runs in a finally block so an exception from one
/// component no longer prevents the remaining components from being released
/// (previously a throw from <c>maMeta.Dispose()</c> leaked the other two).
/// The original disposal order (maMeta, mmfBlock, index) is preserved.
/// </summary>
public override void Dispose()
{
    try
    {
        maMeta.Dispose();
    }
    finally
    {
        try
        {
            mmfBlock.Dispose();
        }
        finally
        {
            index.Dispose();
        }
    }
}
/// <summary>
/// Construct a new <see cref="UmbracoXmlParser"/> instance by parsing the supplied
/// umbraco.config XML cache file or NuCache database file.
/// The file format is sniffed from the first bytes: XML (Umbraco 4-7) is parsed
/// into <c>ParsedXml</c>; otherwise the file is opened as a NuCache BPlusTree
/// (Umbraco 8.0.1+). Either way the parsed handle is released before returning.
/// </summary>
/// <param name="umbracoConfigOrNuCacheDb">Full path to umbraco.config XML cache file or NuCache database file.</param>
/// <param name="options">Options to provide mappings for URL prefixes, doctypes (Umbraco 8 only) and users (Umbraco 8 only).</param>
public UmbracoXmlParser(string umbracoConfigOrNuCacheDb, UmbracoParsingOptions options)
{
    // Save options
    if (options != null)
    {
        Options = options;
    }

    // Remove any trailing slashes from URL prefixes as we don't want them
    if (Options.UrlPrefixMapping != null)
    {
        foreach (var key in Options.UrlPrefixMapping.Keys.ToList())
        {
            if (Options.UrlPrefixMapping[key].EndsWith("/"))
            {
                Options.UrlPrefixMapping[key] = Options.UrlPrefixMapping[key].TrimEnd('/');
            }
        }
    }

    // No file?
    // NOTE(review): the path itself is used as the exception message; a
    // descriptive message would be clearer.
    if (string.IsNullOrEmpty(umbracoConfigOrNuCacheDb))
    {
        throw new ArgumentException(umbracoConfigOrNuCacheDb);
    }

    // Check first few bytes. If it's XML it will start with '<' (potentially after a BOM)
    // NOTE(review): the return value of Read is ignored; a file shorter than 10
    // bytes leaves the tail of the buffer zeroed — confirm that is acceptable.
    byte[] buffer = new byte[10];
    using (var stream = new FileStream(umbracoConfigOrNuCacheDb, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
    {
        stream.Read(buffer, 0, 10);
    }

    // It's an umbraco 4 through 7 XML cache file
    // (&& binds tighter than ||: matches a bare '<' or a UTF-8 BOM then '<')
    if (buffer[0] == '<' || buffer[0] == 0xef && buffer[1] == 0xbb && buffer[2] == 0xbf && buffer[3] == '<') // UTF-8 BOM
    {
        try
        {
            // Load XML into an XDocument
            ParsedXml = XDocument.Load(umbracoConfigOrNuCacheDb);

            // Parse content into an in-memory dictionary of node ID and node information
            ParseXmlIntoUmbracoNodes();

            // Destroy
            ParsedXml = null;
            return;
        }
        catch (UmbracoXmlParsingException ex)
        {
            ParsedXml = null;
            throw new UmbracoXmlParsingException($"Could not parse {umbracoConfigOrNuCacheDb} as XML - {ex.Message}");
        }
        catch
        {
            // Deliberate swallow: a non-XML payload falls through to the
            // NuCache attempt below.
            ParsedXml = null;
            // Might be a NuCache file
        }
    }

    // Umbraco 8.0.1 or later NuCache db file
    try
    {
        var keySerializer = new PrimitiveSerializer();
        var valueSerializer = new ContentNodeKitSerializer();
        var bPlusTreeOptions = new BPlusTree<int, ContentNodeKit>.OptionsV2(keySerializer, valueSerializer)
        {
            CreateFile = CreatePolicy.Never,
            FileName = umbracoConfigOrNuCacheDb,
            ReadOnly = true
        };

        // Read the file into a BPlusTreeObject
        ParsedTree = new BPlusTree<int, ContentNodeKit>(bPlusTreeOptions);
    }
    catch (Exception ex)
    {
        throw new UmbracoXmlParsingException($"Could not parse {umbracoConfigOrNuCacheDb} as a NuCache DB - {ex.Message}");
    }

    // Parse content into an in-memory dictionary of node ID and node information
    // NOTE(review): if this throws, ParsedTree is not disposed — confirm intended.
    ParseTreeIntoUmbracoNodes();

    // Destroy
    ParsedTree.Dispose();
    ParsedTree = null;
}
/// <summary>
/// Verifies transaction-log recovery when the data file exists but holds no
/// committed records. Inserts items while the private storage is held open,
/// appends extras to the log, proves a log-less copy of the file is empty, and
/// then reopens with the log to confirm every record is recovered.
/// </summary>
void TestRecoveryOnNew(BPlusTree<Guid, TestInfo>.OptionsV2 options, int count, int added)
{
    BPlusTree<Guid, TestInfo> tree = null;
    var temp = TempFile.Attach(options.FileName);
    Dictionary<Guid, TestInfo> data = new Dictionary<Guid, TestInfo>();
    try
    {
        Assert.IsNotNull(options.TransactionLog);
        temp.Delete();
        tree = new BPlusTree<Guid, TestInfo>(options);
        using (var log = options.TransactionLog)
        {
            // Dispose the private "_storage" (via reflection) after inserting so
            // nothing commits to the data file; records live only in the log.
            using ((IDisposable)new PropertyValue(tree, "_storage").Value)
                Insert(tree, data, Environment.ProcessorCount, count, TimeSpan.MaxValue);
            //Add extra data...
            AppendToLog(log, TestInfo.Create(added, data));
        }
        tree = null;
        //No data... yet...
        using (TempFile testempty = TempFile.FromCopy(options.FileName))
        {
            // A copy opened WITHOUT the transaction log must appear empty.
            var testoptions = options.Clone();
            testoptions.TransactionLogFileName = null;
            testoptions.TransactionLog = null;
            testoptions.FileName = testempty.TempPath;
            using (var empty = new BPlusTree<Guid, TestInfo>(testoptions))
            {
                empty.EnableCount();
                Assert.AreEqual(0, empty.Count);
            }
        }
        //Now recover...
        using (var recovered = new BPlusTree<Guid, TestInfo>(options))
        {
            TestInfo.AssertEquals(data, recovered);
        }
    }
    finally
    {
        temp.Dispose();
        if (tree != null)
            tree.Dispose();
    }
}
/// <summary>
/// Reads an Umbraco NuCache BPlusTree .db file at <paramref name="filePath"/>
/// and returns its content kits plus timing information, working on a temp
/// copy of the file to avoid lock contention with a running Umbraco instance.
/// Fix: the tree is now disposed via a using block so that an exception while
/// reading no longer leaves the temp copy locked (which made the delete-retry
/// loop below fail as well).
/// </summary>
/// <param name="filePath">Full path to a NuCache .db file.</param>
/// <returns>200 with the kits and stopwatch timings; 404/400 on a bad path.</returns>
public HttpResponseMessage GetNuCacheData(string filePath)
{
    //Check for valid filepath
    if (File.Exists(filePath) == false)
    {
        var message = $"No file exists on disk at {filePath}";
        return Request.CreateErrorResponse(HttpStatusCode.NotFound, message);
    }

    //Check for file extension ends with .db
    //Don't want to attempt to any old file type
    if (Path.GetExtension(filePath) != ".db")
    {
        var message = $"The file {filePath} is not a .db file";
        return Request.CreateErrorResponse(HttpStatusCode.BadRequest, message);
    }

    //We need to create a temp copy of the nucache DB - to avoid file locks if its in use whilst we try to read it
    //'NuCache.Content.db' will become 'NuCache.Content.Explorer.Temp.db'
    var tempFileName = filePath.Replace(".db", ".Explorer.Temp.db");
    File.Copy(filePath, tempFileName, true);

    var keySerializer = new PrimitiveSerializer();
    var valueSerializer = new ContentNodeKitSerializer();
    var options = new BPlusTree<int, ContentNodeKit>.OptionsV2(keySerializer, valueSerializer)
    {
        CreateFile = CreatePolicy.Never,
        FileName = tempFileName
    };

    //Read the file into a BPlusTreeObject & select the kits.
    //The stopwatch times only the read, as before; the using block guarantees
    //the file handle is released before we try to delete the temp copy.
    ContentNodeKit[] kits;
    Stopwatch sw;
    using (var tree = new BPlusTree<int, ContentNodeKit>(options))
    {
        sw = Stopwatch.StartNew();
        kits = tree.Select(x => x.Value).ToArray();
        sw.Stop();
    }

    //Delete the file (seems like could be a lock, so we wait 100ms between each attempt upto 10 times)
    var ok = false;
    var attempts = 0;
    while (!ok)
    {
        System.Threading.Thread.Sleep(100);
        try
        {
            attempts++;
            File.Delete(tempFileName);
            ok = true;
        }
        catch
        {
            // Give up (rethrow) after 10 failed attempts.
            if (attempts == 10)
            {
                throw;
            }
        }
    }

    //Add to our JSON object the stopwatch clock to read the DB/dictionary file
    var response = new ApiResponse
    {
        Items = kits,
        TotalItems = kits.Length,
        StopClock = new StopClock
        {
            Hours = sw.Elapsed.Hours,
            Minutes = sw.Elapsed.Minutes,
            Seconds = sw.Elapsed.Seconds,
            Milliseconds = sw.Elapsed.Milliseconds,
            Ticks = sw.Elapsed.Ticks
        }
    };

    return Request.CreateResponse(HttpStatusCode.OK, response);
}
/// <summary>Releases the underlying database handle.</summary>
public void Dispose() => _db.Dispose();
/// <summary>
/// Reads the NuCache .db file for the given content type from the local temp
/// folder and returns its content kits plus timing information, working on a
/// temp copy to avoid locking the live file.
/// Fix: the tree is now disposed via a using block so an exception while
/// reading no longer leaves the temp copy locked (previously the
/// <c>DeleteTempFile</c> call in the catch would then fail on that lock).
/// </summary>
/// <param name="contentType">NuCache file suffix, e.g. "Content" or "Media".</param>
/// <returns>200 with kits and timings; 404/400 on a bad path; 500 on failure.</returns>
public HttpResponseMessage GetNuCacheFile(string contentType)
{
    var filePath = Path.Combine(globalSettings.LocalTempPath, "NuCache\\NuCache." + contentType + ".db");
    var tempFileName = filePath.Replace(".db", ".Explorer.Temp.db");
    try
    {
        //Check for valid filepath
        if (File.Exists(filePath) == false)
        {
            var message = $"No file exists on disk at {filePath}";
            return Request.CreateErrorResponse(HttpStatusCode.NotFound, message);
        }

        //Check for file extension ends with .db
        //Don't want to attempt to any old file type
        if (Path.GetExtension(filePath) != ".db")
        {
            var message = $"The file {filePath} is not a .db file";
            return Request.CreateErrorResponse(HttpStatusCode.BadRequest, message);
        }

        //We need to create a temp copy of the nucache DB - to avoid file locks if its in use whilst we try to read it
        //'NuCache.Content.db' will become 'NuCache.Content.Explorer.Temp.db'
        File.Copy(filePath, tempFileName, true);

        var keySerializer = new PrimitiveSerializer();
        var valueSerializer = new ContentNodeKitSerializer();
        var options = new BPlusTree<int, ContentNodeKit>.OptionsV2(keySerializer, valueSerializer)
        {
            CreateFile = CreatePolicy.Never,
            FileName = tempFileName,
            // default is 4096, min 2^9 = 512, max 2^16 = 64K
            FileBlockSize = GetBlockSize(),
        };

        //Read the file into a BPlusTreeObject & select the kits.
        //The stopwatch times only the read, as before; the using block ensures
        //the handle is released before the temp file is deleted.
        ContentNodeKit[] kits;
        Stopwatch sw;
        using (var tree = new BPlusTree<int, ContentNodeKit>(options))
        {
            sw = Stopwatch.StartNew();
            kits = tree.Select(x => x.Value).ToArray();
            sw.Stop();
        }

        DeleteTempFile(tempFileName);

        //Add to our JSON object the stopwatch clock to read the DB/dictionary file
        var response = new ApiResponse
        {
            Items = kits,
            TotalItems = kits.Length,
            StopClock = new StopClock
            {
                Hours = sw.Elapsed.Hours,
                Minutes = sw.Elapsed.Minutes,
                Seconds = sw.Elapsed.Seconds,
                Milliseconds = sw.Elapsed.Milliseconds,
                Ticks = sw.Elapsed.Ticks
            }
        };

        return Request.CreateResponse(HttpStatusCode.OK, response);
    }
    catch (Exception e)
    {
        // Best-effort cleanup of the temp copy, then surface the error.
        DeleteTempFile(tempFileName);
        return Request.CreateErrorResponse(HttpStatusCode.InternalServerError, e.Message);
    }
}
/// <summary>
/// Releases the storage stream and the index.
/// Fix: the index is now disposed in a finally block so that an exception from
/// <c>storageStream.Dispose()</c> no longer leaks it. The original disposal
/// order (stream first, then index) is preserved.
/// </summary>
public void Dispose()
{
    try
    {
        storageStream.Dispose();
    }
    finally
    {
        index.Dispose();
    }
}