/// <summary>
/// Attach the logger and, when the underlying stream is still empty,
/// write a brand-new database header into it.
/// </summary>
public void Initialize(Logger log, string password)
{
    // keep logger reference for disk-level tracing
    _log = log;

    // a non-empty stream already contains a datafile - nothing to do
    if (_stream.Length != 0) return;

    _log.Write(Logger.DISK, "initialize new datafile");

    // build the initial database structure directly into the stream
    UltraLiteEngine.CreateDatabase(_stream, password);
}
/// <summary>
/// Build a map from data pageID to collection name by reading each collection
/// page from the header and following its index structures.
/// Unreadable collection pages are reported into the log and skipped.
/// </summary>
private static Dictionary<uint, string> RecoveryCollectionPages(UltraLiteEngine engine, HeaderPage header, StringBuilder log)
{
    var map = new Dictionary<uint, string>();

    foreach (var col in header.CollectionPages)
    {
        CollectionPage collectionPage = null;

        try
        {
            // read raw collection page from disk
            var raw = engine._disk.ReadPage(col.Value);
            var basePage = BasePage.ReadPage(raw);

            if (basePage.PageType != PageType.Collection) continue;

            collectionPage = basePage as CollectionPage;
        }
        catch (Exception ex)
        {
            log.AppendLine($"Page {col.Value} (Collection) Error: {ex.Message}");
            continue;
        }

        // head pageIDs of every non-empty index in this collection
        var headPageIDs = new HashSet<uint>(
            collectionPage.Indexes
                .Where(idx => idx.IsEmpty == false && idx.HeadNode.PageID != uint.MaxValue)
                .Select(idx => idx.HeadNode.PageID));

        // every data page reachable from those indexes belongs to this collection
        foreach (var dataPageID in RecoveryDetectCollectionByIndexPages(engine, headPageIDs, log))
        {
            map[dataPageID] = col.Key;
        }
    }

    return map;
}
/// <summary>
/// Open (or create) the datafile on disk with an exclusive handle and,
/// when the file is brand new, write the initial database structure into it.
/// </summary>
public void Initialize(Logger log, string password)
{
    // keep logger reference for disk-level tracing
    _log = log;
    _log.Write(Logger.DISK, "open datafile '{0}'", Path.GetFileName(_filename));

    // exclusive read/write handle - no other process may share the file
    _stream = this.CreateFileStream(_filename, System.IO.FileMode.OpenOrCreate, FileAccess.ReadWrite, FileShare.None);

    // an existing (non-empty) file needs no initialization
    if (_stream.Length != 0) return;

    _log.Write(Logger.DISK, "initialize new datafile");

    UltraLiteEngine.CreateDatabase(_stream, password, _options.InitialSize);
}
/// <summary>
/// Walk the index-page link structure starting from the given head pages and
/// collect every data pageID referenced by the index nodes. All pages reached
/// from the same heads belong to the same collection.
/// Unreadable index pages are reported into the log and skipped.
/// </summary>
/// <param name="engine">Engine owning the (possibly corrupted) datafile</param>
/// <param name="initialPagesID">Head index pageIDs taken from the collection page</param>
/// <param name="log">Recovery log for unreadable pages</param>
/// <returns>Set of data pageIDs reachable from the given index pages</returns>
private static HashSet<uint> RecoveryDetectCollectionByIndexPages(UltraLiteEngine engine, HashSet<uint> initialPagesID, StringBuilder log)
{
    // visited prevents re-reading a page; pending is a plain work queue.
    // (replaces the original O(n^2) "scan the dictionary for the first
    // unvisited entry" loop - the resulting set is identical)
    var visited = new HashSet<uint>(initialPagesID);
    var pending = new Queue<uint>(initialPagesID);
    var dataPages = new HashSet<uint>();

    // schedule a pageID for visiting unless it is the empty-marker or already seen
    void Enqueue(uint pageID)
    {
        if (pageID != uint.MaxValue && visited.Add(pageID))
        {
            pending.Enqueue(pageID);
        }
    }

    while (pending.Count > 0)
    {
        var pageID = pending.Dequeue();

        IndexPage indexPage = null;

        try
        {
            // try read page from disk and deserialize as IndexPage
            var buffer = engine._disk.ReadPage(pageID);
            var page = BasePage.ReadPage(buffer);

            if (page.PageType != PageType.Index) continue;

            indexPage = page as IndexPage;
        }
        catch (Exception ex)
        {
            // fixed label: this method reads Index pages (original logged "(Collection)")
            log.AppendLine($"Page {pageID} (Index) Error: {ex.Message}");
            continue;
        }

        // collect data blocks and follow every node link to discover more index pages
        foreach (var node in indexPage.Nodes.Values)
        {
            if (node.DataBlock.PageID != uint.MaxValue)
            {
                dataPages.Add(node.DataBlock.PageID);
            }

            Enqueue(node.PrevNode.PageID);
            Enqueue(node.NextNode.PageID);

            foreach (var pos in node.Prev.Where(x => !x.IsEmpty && x.PageID != uint.MaxValue))
            {
                Enqueue(pos.PageID);
            }

            foreach (var pos in node.Next.Where(x => !x.IsEmpty && x.PageID != uint.MaxValue))
            {
                Enqueue(pos.PageID);
            }
        }
    }

    return dataPages;
}
/// <summary>
/// Try recovery data from current datafile into a new datafile.
/// </summary>
/// <param name="filename">Path of the (possibly corrupted) datafile</param>
/// <returns>Recovery log text; empty string when the file does not exist</returns>
public static string Recovery(string filename)
{
    // if not exists, just exit
    if (!File.Exists(filename)) return "";

    var log = new StringBuilder();
    var newfilename = FileHelper.GetTempFile(filename, "-recovery", true);
    var count = 0;

    using (var olddb = new UltraLiteEngine(filename))
    using (var newdb = new UltraLiteEngine(newfilename, false))
    {
        // get header from old database (this must be possible to read)
        var header = olddb._pager.GetPage<HeaderPage>(0);

        // map: data pageID -> collection name, recovered via surviving index structures
        var collections = RecoveryCollectionPages(olddb, header, log);

        // try recovery all data pages, including LastPageID itself
        // (original loop used "<" and silently skipped the last page)
        for (uint i = 1; i <= header.LastPageID; i++)
        {
            DataPage dataPage = null;

            try
            {
                var buffer = olddb._disk.ReadPage(i);

                // byte 4 of the raw page holds the page type - only DataPage (== 4) matters here
                if (buffer[4] != (byte)PageType.Data) continue;

                dataPage = BasePage.ReadPage(buffer) as DataPage;
            }
            catch (Exception ex)
            {
                log.AppendLine($"Page {i} (DataPage) Error: {ex.Message}");
                continue;
            }

            // try find collectionName using pageID map (use fixed name if not found)
            if (collections.TryGetValue(i, out var colname) == false)
            {
                colname = "_recovery";
            }

            foreach (var block in dataPage.DataBlocks)
            {
                try
                {
                    // read raw bytes, deserialize as document and insert into new database
                    var bson = olddb._data.Read(block.Value.Position);
                    var doc = BsonSerializer.Deserialize(bson);

                    newdb.Insert(colname, doc);

                    count++;
                }
                catch (Exception ex)
                {
                    // a broken document must not abort the rest of the page
                    log.AppendLine($"Document {block.Value.Position} Error: {ex.Message}");
                }
            }
        }
    }

    log.Insert(0, $"Document recovery count: {count}\n");

    return log.ToString();
}
/// <summary>
/// Reduce disk size re-arranging unused spaces. Can change password. If temporary disk was not provided, use MemoryStream temp disk
/// </summary>
public long Shrink(string password = null, IDiskService tempDisk = null)
{
    var sizeBefore = _disk.FileLength;

    // rebuild the whole database inside a scratch disk, then copy it back page by page
    using (var scratch = tempDisk ?? new StreamDiskService(new MemoryStream()))
    using (var target = new UltraLiteEngine(scratch, password))
    {
        foreach (var name in this.GetCollectionNames())
        {
            // recreate every user-defined index first (the _id index is implicit)
            foreach (var index in this.GetIndexes(name).Where(x => x.Field != "_id"))
            {
                target.EnsureIndex(name, index.Field, index.Unique);
            }

            // bulk-copy all documents of this collection
            target.InsertBulk(name, this.Find(name, Query.All()));

            // carry the collection sequence number over so auto-ids keep counting
            var seq = _collections.Get(name).Sequence;

            target.Transaction(name, true, (col) =>
            {
                col.Sequence = seq;
                target._pager.SetDirty(col);
                return true;
            });
        }

        // keep user version metadata
        target.UserVersion = this.UserVersion;

        // shrink the physical file to exactly the rebuilt size before copying pages back
        _disk.SetLength(scratch.FileLength);

        // header of the rebuilt database tells how many pages to copy
        var header = BasePage.ReadPage(scratch.ReadPage(0)) as HeaderPage;

        // raw page copy from scratch into the original disk (page 1 is the lock page - skip it)
        for (uint pageID = 0; pageID <= header.LastPageID; pageID++)
        {
            if (pageID == 1) continue;

            _disk.WritePage(pageID, scratch.ReadPage(pageID));
        }

        // swap encryption: drop the old cipher, build a new one only when a password was given
        _crypto?.Dispose();
        _crypto = password == null ? null : new AesEncryption(password, header.Salt);

        // services must be rebuilt since the crypto instance may have changed
        this.InitializeServices();

        // report how many bytes were reclaimed
        return sizeBefore - scratch.FileLength;
    }
}