Example #1
        /// <summary>
        /// Get a page from cache or from disk (and put it in the cache)
        /// </summary>
        public T GetPage <T>(uint pageID, bool setDirty = false)
            where T : BasePage
        {
            var page = _cache.GetPage(pageID);

            // not in cache? load from disk
            if (page == null)
            {
                var buffer = _disk.ReadPage(pageID);
                page = BasePage.ReadPage(buffer);
                _cache.AddPage(page);
            }

#if DEBUG
            // an Empty page should never be requested as a concrete page type
            if (page.PageType == PageType.Empty && typeof(T) != typeof(BasePage))
            {
                throw new Exception("Pager.GetPage<T>() should never happen");
            }
#endif
            // mark page as dirty when requested via parameter
            if (setDirty)
            {
                this.SetDirty((T)page);
            }

            return((T)page);
        }
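The cache-then-disk flow above can be illustrated without any LiteDB internals. A minimal sketch, assuming a plain dictionary as the cache; Page, IPageSource and SimplePager are hypothetical stand-ins, not LiteDB types:

        using System.Collections.Generic;

        // Hypothetical stand-ins for illustration only; these are not LiteDB types.
        public record Page(uint PageID, byte[] Buffer) { public bool IsDirty { get; set; } }

        public interface IPageSource
        {
            byte[] ReadPage(uint pageID);   // assumed to return one full page buffer
        }

        public class SimplePager
        {
            private readonly Dictionary<uint, Page> _cache = new();
            private readonly IPageSource _disk;

            public SimplePager(IPageSource disk) => _disk = disk;

            // Same flow as GetPage<T>: try cache first, fall back to disk, then cache the result.
            public Page GetPage(uint pageID, bool setDirty = false)
            {
                if (!_cache.TryGetValue(pageID, out var page))
                {
                    page = new Page(pageID, _disk.ReadPage(pageID));
                    _cache[pageID] = page;
                }

                if (setDirty) page.IsDirty = true;

                return page;
            }
        }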
Example #2
        /// <summary>
        /// Try to recover from the journal file (if it exists) and restore the original datafile.
        /// Journal pages are NOT encrypted (even when the datafile is encrypted)
        /// </summary>
        public void Recovery()
        {
            var fileSize = _disk.FileLength;

            // read all journal pages
            foreach (var buffer in _disk.ReadJournal())
            {
                // read pageID (first 4 bytes)
                var pageID = BitConverter.ToUInt32(buffer, 0);

                _log.Write(Logger.RECOVERY, "recover page #{0:0000}", pageID);

                // if header page, read it fully (to get the original file size)
                if (pageID == 0)
                {
                    var header = (HeaderPage)BasePage.ReadPage(buffer);

                    fileSize = BasePage.GetSizeOfPages(header.LastPageID + 1);
                }

                // write in stream (encrypt if datafile is encrypted)
                _disk.WritePage(pageID, _crypto == null || pageID == 0 ? buffer : _crypto.Encrypt(buffer));
            }

            _log.Write(Logger.RECOVERY, "resize datafile to {0} bytes", fileSize);

            // shrink the datafile back if it grew beyond its original size before rollback
            _disk.SetLength(fileSize);

            // empty journal file
            _disk.ClearJournal();
        }
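The loop above leans on two simple conventions: the first 4 bytes of every page buffer hold its little-endian pageID, and the datafile size is (LastPageID + 1) * PAGE_SIZE. A tiny self-contained sketch; the 4096-byte page size is an assumption here, not taken from the snippet:

        using System;

        const int PAGE_SIZE = 4096;                        // assumed page size

        var buffer = new byte[PAGE_SIZE];
        BitConverter.GetBytes(42u).CopyTo(buffer, 0);      // pretend this buffer is page #42

        var pageID = BitConverter.ToUInt32(buffer, 0);     // same call used by Recovery()
        Console.WriteLine($"recover page #{pageID:0000}"); // prints "recover page #0042"

        uint lastPageID = 99;
        Console.WriteLine($"resize datafile to {(lastPageID + 1L) * PAGE_SIZE} bytes"); // 409600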
Example #3
        /// <summary>
        /// Read page bytes from disk
        /// </summary>
        public virtual byte[] ReadPage(uint pageID)
        {
            var buffer   = new byte[BasePage.PAGE_SIZE];
            var position = BasePage.GetSizeOfPages(pageID);

            // position cursor
            if (_stream.Position != position)
            {
                _stream.Seek(position, SeekOrigin.Begin);
            }

            // read bytes from data file
            _stream.Read(buffer, 0, BasePage.PAGE_SIZE);

            _log.Write(Logger.DISK, "read page #{0:0000} :: {1}", pageID, (PageType)buffer[PAGE_TYPE_POSITION]);

            // when reading the header, check the password
            if (pageID == 0)
            {
                // header page will be read twice here (the price of keeping concerns isolated)
                var header = (HeaderPage)BasePage.ReadPage(buffer);
                ValidatePassword(header.Password);
            }

            return(buffer);
        }
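Seeking works because pages have a fixed size, so BasePage.GetSizeOfPages(pageID) is effectively pageID * PAGE_SIZE. A hedged sketch of the same read against any Stream, assuming a 4096-byte page and adding the read loop that Stream.Read strictly requires; PageIo is an illustrative helper, not a LiteDB type:

        using System.IO;

        public static class PageIo
        {
            public const int PAGE_SIZE = 4096;   // assumed page size

            public static byte[] ReadPageAt(Stream stream, uint pageID)
            {
                var buffer   = new byte[PAGE_SIZE];
                var position = (long)pageID * PAGE_SIZE;   // same math as BasePage.GetSizeOfPages

                if (stream.Position != position)
                {
                    stream.Seek(position, SeekOrigin.Begin);
                }

                // loop until the full page is read; Stream.Read may return fewer bytes
                var read = 0;
                while (read < PAGE_SIZE)
                {
                    var n = stream.Read(buffer, read, PAGE_SIZE - read);
                    if (n == 0) break;   // end of stream
                    read += n;
                }

                return buffer;
            }
        }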
Example #4
        private void Recovery(FileStream journal)
        {
            var fileSize = _stream.Length;
            var buffer   = new byte[BasePage.PAGE_SIZE];

            journal.Seek(0, SeekOrigin.Begin);

            while (journal.Position < journal.Length)
            {
                // read page bytes from journal file
                journal.Read(buffer, 0, BasePage.PAGE_SIZE);

                // read pageID (first 4 bytes)
                var pageID = BitConverter.ToUInt32(buffer, 0);

                _log.Write(Logger.RECOVERY, "recover page #{0:0000}", pageID);

                // if header page, read it fully (to get the original file size)
                if (pageID == 0)
                {
                    var header = (HeaderPage)BasePage.ReadPage(buffer);

                    fileSize = (header.LastPageID + 1) * BasePage.PAGE_SIZE;
                }

                // write in stream
                this.WritePage(pageID, buffer);
            }

            _log.Write(Logger.RECOVERY, "resize datafile to {0} bytes", fileSize);

            // shrink the datafile back if it grew beyond its original size before rollback
            _stream.SetLength(fileSize);
        }
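The same replay can be exercised end to end with plain streams. A self-contained sketch, assuming a 4096-byte page and that every journal entry is one full page image, as in the method above; JournalReplay is an illustrative helper:

        using System;
        using System.IO;

        public static class JournalReplay
        {
            public const int PAGE_SIZE = 4096;   // assumed page size

            public static void Replay(Stream journal, Stream datafile)
            {
                var buffer = new byte[PAGE_SIZE];

                journal.Seek(0, SeekOrigin.Begin);

                while (journal.Position < journal.Length)
                {
                    // each journal entry is assumed to be one full page image
                    var read = journal.Read(buffer, 0, PAGE_SIZE);
                    if (read < PAGE_SIZE) break;   // truncated tail, stop here

                    // first 4 bytes of a page hold its pageID
                    var pageID = BitConverter.ToUInt32(buffer, 0);

                    // copy the original page image back to its position in the datafile
                    datafile.Seek((long)pageID * PAGE_SIZE, SeekOrigin.Begin);
                    datafile.Write(buffer, 0, PAGE_SIZE);
                }
            }
        }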
Example #5
        /// <summary>
        /// Dump all pages into a string - debug purpose only
        /// </summary>
        public StringBuilder DumpPages(uint startPage = 0, uint endPage = uint.MaxValue)
        {
            var sb = new StringBuilder();

            sb.AppendLine("Dump database");
            sb.AppendLine("=============");
            sb.AppendLine();

            var header = (HeaderPage)BasePage.ReadPage(_disk.ReadPage(0));

            for (uint i = startPage; i <= endPage; i++)
            {
                if (i > header.LastPageID)
                {
                    break;
                }

                var p = BasePage.ReadPage(_disk.ReadPage(i));

                sb.AppendFormat("{0} <{1},{2}> [{3}] {4}{5} | ",
                                p.PageID.Dump(),
                                p.PrevPageID.Dump(),
                                p.NextPageID.Dump(),
                                p.PageType.ToString().PadRight(6).Substring(0, 6),
                                p.FreeBytes.ToString("0000"),
                                p.IsDirty ? "d" : " ");

                p.Dump(sb);
                sb.AppendLine();
            }

            return(sb);
        }
Example #6
        private void AvoidDirtyRead()
        {
            // if disk access is exclusive, there is no need to check for dirty reads
            if (_disk.IsExclusive)
            {
                return;
            }

            _log.Write(Logger.CACHE, "checking disk to avoid dirty read");

            // empty cache? just exit
            if (_cache.CleanUsed == 0)
            {
                return;
            }

            // get ChangeID from cache
            var header   = _cache.GetPage(0) as HeaderPage;
            var changeID = header == null ? 0 : header.ChangeID;

            // and get header from disk
            var disk = BasePage.ReadPage(_disk.ReadPage(0)) as HeaderPage;

            // if the header changed, clear the cache and add the new header to it
            if (disk.ChangeID != changeID)
            {
                _log.Write(Logger.CACHE, "file changed from another process, cleaning all cache pages");

                _cache.ClearPages();
                _cache.AddPage(disk);
            }
        }
Example #7
        /// <summary>
        /// Get journal pages and overwrite them all into the datafile
        /// </summary>
        public void Recovery()
        {
            _log.Write(Logger.RECOVERY, "initializing recovery mode");

            using (_locker.Write())
            {
                // double-check whether the header still needs recovery (it could already have been recovered by another thread)
                var header = BasePage.ReadPage(_disk.ReadPage(0)) as HeaderPage;

                if (header.Recovery == false)
                {
                    return;
                }

                // read all journal pages
                foreach (var buffer in _disk.ReadJournal(header.LastPageID))
                {
                    // read pageID (first 4 bytes)
                    var pageID = BitConverter.ToUInt32(buffer, 0);

                    _log.Write(Logger.RECOVERY, "recover page #{0:0000}", pageID);

                    // write in stream (encrypt if datafile is encrypted)
                    _disk.WritePage(pageID, _crypto == null || pageID == 0 ? buffer : _crypto.Encrypt(buffer));
                }

                // shrink datafile
                _disk.ClearJournal(header.LastPageID);
            }
        }
Example #8
        /// <summary>
        /// Get a page from cache or from disk (and put it in the cache)
        /// </summary>
        public T GetPage <T>(uint pageID)
            where T : BasePage
        {
            lock (_disk)
            {
                var page = _cache.GetPage(pageID);

                // not in cache? load from disk
                if (page == null)
                {
                    var buffer = _disk.ReadPage(pageID);

                    // if the datafile is encrypted, decrypt the buffer (the header is not encrypted)
                    if (_crypto != null && pageID > 0)
                    {
                        buffer = _crypto.Decrypt(buffer);
                    }

                    page = BasePage.ReadPage(buffer);

                    _cache.AddPage(page);
                }

                return((T)page);
            }
        }
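The _crypto calls follow one rule: page 0 (the header) stays plaintext, every other page is encrypted. A hedged sketch of that idea using the BCL's Aes; this is not LiteDB's AesEncryption class, and the key-derivation parameters and fixed IV are illustrative assumptions only:

        using System;
        using System.Security.Cryptography;

        public sealed class PageCrypto : IDisposable
        {
            private readonly Aes _aes;

            public PageCrypto(string password, byte[] salt)
            {
                _aes = Aes.Create();
                _aes.Padding = PaddingMode.None;    // a 4096-byte page is already a block multiple

                // illustrative key derivation; iteration count and hash are assumptions
                using var kdf = new Rfc2898DeriveBytes(password, salt, 100_000, HashAlgorithmName.SHA256);
                _aes.Key = kdf.GetBytes(32);
                _aes.IV  = kdf.GetBytes(16);        // a fixed IV is fine for a sketch, not for production
            }

            public byte[] Encrypt(byte[] page)
            {
                using var enc = _aes.CreateEncryptor();
                return enc.TransformFinalBlock(page, 0, page.Length);
            }

            public byte[] Decrypt(byte[] page)
            {
                using var dec = _aes.CreateDecryptor();
                return dec.TransformFinalBlock(page, 0, page.Length);
            }

            public void Dispose() => _aes.Dispose();
        }

        // usage mirroring GetPage<T>: the header page is never encrypted
        // var plain = pageID == 0 ? buffer : crypto.Decrypt(buffer);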
Example #9
        /// <summary>
        /// Get a page from cache or from disk (and put it in the cache)
        /// </summary>
        public T GetPage <T>(uint pageID)
            where T : BasePage
        {
            // lock for concurrent access (read access is not inside a transaction lock)
            lock (_cache)
            {
                var page = _cache.GetOrDefault(pageID);

                // not in cache? load from disk
                if (page == null)
                {
                    var buffer = _disk.ReadPage(pageID);

                    // if the datafile is encrypted, decrypt the buffer (the header is not encrypted)
                    if (_crypto != null && pageID > 0)
                    {
                        buffer = _crypto.Decrypt(buffer);
                    }

                    page = BasePage.ReadPage(buffer);
                    _cache.Add(pageID, page);
                }

                return((T)page);
            }
        }
Example #10
        /// <summary>
        /// Initialize LiteEngine using custom disk service implementation and full engine options
        /// </summary>
        public LiteEngine(IDiskService disk, string password = null, TimeSpan? timeout = null, int cacheSize = 5000, Logger log = null, bool utcDate = false)
        {
            if (disk == null)
            {
                throw new ArgumentNullException("disk");
            }

            _timeout    = timeout ?? TimeSpan.FromMinutes(1);
            _cacheSize  = cacheSize;
            _disk       = disk;
            _log        = log ?? new Logger();
            _bsonReader = new BsonReader(utcDate);

            try
            {
                // initialize datafile (create) and set log instance
                _disk.Initialize(_log, password);

                // lock disk (read mode) before read header
                var position = _disk.Lock(LockState.Read, _timeout);

                var buffer = _disk.ReadPage(0);

                _disk.Unlock(LockState.Read, position);

                // create header instance from array bytes
                var header = BasePage.ReadPage(buffer) as HeaderPage;

                // hash password with sha1 or keep as empty byte[20]
                var sha1 = password == null ? new byte[20] : AesEncryption.HashSHA1(password);

                // compare the header password with the user password even if none was passed (the datafile may have one)
                if (sha1.BinaryCompareTo(header.Password) != 0)
                {
                    throw LiteException.DatabaseWrongPassword();
                }

                // initialize AES encryptor
                if (password != null)
                {
                    _crypto = new AesEncryption(password, header.Salt);
                }

                // initialize all services
                this.InitializeServices();

                // if the header is marked for recovery, do it now
                if (header.Recovery)
                {
                    _trans.Recovery();
                }
            }
            catch (Exception)
            {
                // explicit dispose
                this.Dispose();
                throw;
            }
        }
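The password check compares a stored 20-byte value (all zeros when no password is set) against a SHA1 hash of the supplied password. A hedged sketch of that comparison; PasswordCheck is an illustrative helper and may differ in detail from LiteDB's AesEncryption.HashSHA1:

        using System.Linq;
        using System.Security.Cryptography;
        using System.Text;

        public static class PasswordCheck
        {
            public static byte[] HashSha1(string password)
            {
                using var sha1 = SHA1.Create();
                return sha1.ComputeHash(Encoding.UTF8.GetBytes(password));
            }

            // storedHash is the 20-byte value kept in the header page
            public static bool Matches(byte[] storedHash, string password)
            {
                var candidate = password == null ? new byte[20] : HashSha1(password);
                return candidate.SequenceEqual(storedHash);
            }
        }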
Example #11
        /// <summary>
        /// Reduce disk size by re-arranging unused space. Can change the password. If no temporary disk is provided, a MemoryStream temp disk is used
        /// </summary>
        public long Shrink(string password = null, IDiskService temp = null)
        {
            var originalSize = _disk.FileLength;

            // if no temp disk was passed, use a memory stream disk
            temp = temp ?? new StreamDiskService(new MemoryStream());

            using (_locker.Reserved())
                using (_locker.Exclusive())
                    using (var engine = new LiteEngine(temp, password))
                    {
                        // read all collections
                        foreach (var collectionName in this.GetCollectionNames())
                        {
                            // first create all user indexes (exclude _id index)
                            foreach (var index in this.GetIndexes(collectionName).Where(x => x.Field != "_id"))
                            {
                                engine.EnsureIndex(collectionName, index.Field, index.Unique);
                            }

                            // now copy documents
                            var docs = this.Find(collectionName, Query.All());

                            engine.InsertBulk(collectionName, docs);
                        }

                        // copy user version
                        engine.UserVersion = this.UserVersion;

                        // set current disk size to exact new disk usage
                        _disk.SetLength(temp.FileLength);

                        // read new header page to start copy
                        var header = BasePage.ReadPage(temp.ReadPage(0)) as HeaderPage;

                        // copy (as is) all pages from temp disk to original disk
                        for (uint i = 0; i <= header.LastPageID; i++)
                        {
                            var page = temp.ReadPage(i);

                            _disk.WritePage(i, page);
                        }

                        // create/destroy crypto class
                        _crypto = password == null ? null : new AesEncryption(password, header.Salt);

                        // initialize all services again (crypto can be changed)
                        this.InitializeServices();

                        // return how many bytes are reduced
                        return(originalSize - temp.FileLength);
                    }
        }
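A hedged usage sketch of the method above: shrink through the default MemoryStream temp disk and report the reclaimed bytes. The file path is illustrative:

        using System;
        using LiteDB;

        public static class ShrinkDemo
        {
            public static void Run()
            {
                using (var engine = new LiteEngine(@"C:\data\app.db"))
                {
                    // with no temp disk supplied, a MemoryStream-backed disk is used
                    var savedBytes = engine.Shrink();

                    Console.WriteLine($"Shrink reclaimed {savedBytes} bytes");
                }
            }
        }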
Example #12
        /// <summary>
        /// Initialize LiteEngine using custom disk service implementation and full engine options
        /// </summary>
        public LiteEngine(IDiskService disk, string password = null, TimeSpan? timeout = null, int cacheSize = 5000, Logger log = null)
        {
            if (disk == null)
            {
                throw new ArgumentNullException("disk");
            }

            _timeout   = timeout ?? TimeSpan.FromMinutes(1);
            _cacheSize = cacheSize;
            _disk      = disk;
            _log       = log ?? new Logger();

            try
            {
                // initialize datafile (create) and set log instance
                _disk.Initialize(_log, password);

                // read header page
                var header = BasePage.ReadPage(_disk.ReadPage(0)) as HeaderPage;

                // hash password with sha1 or keep as empty byte[20]
                var sha1 = password == null ? new byte[20] : AesEncryption.HashSHA1(password);

                // compare the header password with the user password even if none was passed (the datafile may have one)
                if (sha1.BinaryCompareTo(header.Password) != 0)
                {
                    throw LiteException.DatabaseWrongPassword();
                }

                // initialize AES encryptor
                if (password != null)
                {
                    _crypto = new AesEncryption(password, header.Salt);
                }

                // initialize all services
                this.InitializeServices();

                // try recovery if a journal file exists
                _trans.Recovery();
            }
            catch (Exception)
            {
                // explicit dispose
                this.Dispose();
                throw;
            }
        }
Example #13
        /// <summary>
        /// Dump all pages into a string - debug purpose only
        /// </summary>
        public StringBuilder DumpPages(uint startPage = 0, uint endPage = uint.MaxValue)
        {
            var sb = new StringBuilder();

            sb.AppendLine("Dump database");
            sb.AppendLine("=============");
            sb.AppendLine();

            using (var trans = _transaction.Begin(true))
            {
                try
                {
                    var header = (HeaderPage)BasePage.ReadPage(_disk.ReadPage(0));

                    for (uint i = startPage; i <= endPage; i++)
                    {
                        if (i > header.LastPageID)
                        {
                            break;
                        }

                        var p = BasePage.ReadPage(_disk.ReadPage(i));

                        sb.AppendFormat("{0} <{1},{2}> [{3}] {4}{5} | ",
                                        p.PageID.Dump(),
                                        p.PrevPageID.Dump(),
                                        p.NextPageID.Dump(),
                                        p.PageType.ToString().PadRight(6).Substring(0, 6),
                                        p.FreeBytes.ToString("0000"),
                                        p.IsDirty ? "d" : " ");

                        p.Dump(sb);
                        sb.AppendLine();
                    }

                    trans.Commit();
                }
                catch (Exception ex)
                {
                    _log.Write(Logger.ERROR, ex.Message);
                    trans.Rollback();
                    throw;
                }
            }
            return(sb);
        }
Example #14
        /// <summary>
        /// Test if the cache is still valid (if the datafile was changed by another process, reset the cache)
        /// Returns true if file was changed
        /// [Thread Safe]
        /// </summary>
        private bool DetectDatabaseChanges()
        {
            // if disk access is exclusive, there is no need to check for dirty reads
            if (_disk.IsExclusive)
            {
                return(false);
            }

            // empty cache? just exit
            if (_cache.CleanUsed == 0)
            {
                return(false);
            }

            _log.Write(Logger.CACHE, "checking disk to detect database changes from another process");

            // get ChangeID from cache
            var header   = _cache.GetPage(0) as HeaderPage;
            var changeID = header == null ? 0 : header.ChangeID;

            // and get header from disk
            var disk = BasePage.ReadPage(_disk.ReadPage(0)) as HeaderPage;

            // if the disk header is in recovery mode, throw an exception so the datafile is re-opened and pages are recovered
            if (disk.Recovery)
            {
                _log.Write(Logger.ERROR, "datafile in recovery mode, need re-open database");

                throw LiteException.NeedRecover();
            }

            // if the header changed, clear the cache and add the new header to it
            if (disk.ChangeID != changeID)
            {
                _log.Write(Logger.CACHE, "file changed from another process, cleaning all cache pages");

                _cache.ClearPages();
                _cache.AddPage(disk);
                return(true);
            }

            return(false);
        }
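The detection itself is a counter comparison: the header ChangeID cached in memory versus the one currently on disk. A minimal self-contained sketch; HeaderInfo and the dictionary cache are illustrative stand-ins, not LiteDB types:

        using System.Collections.Generic;

        public record HeaderInfo(uint ChangeID);

        public static class ChangeDetector
        {
            // Returns true when the on-disk header differs from the cached one;
            // in that case the page cache is reset and reseeded with the new header.
            public static bool DetectChanges(Dictionary<uint, HeaderInfo> cache, HeaderInfo diskHeader)
            {
                var cachedChangeID = cache.TryGetValue(0, out var cached) ? cached.ChangeID : 0u;

                if (diskHeader.ChangeID == cachedChangeID) return false;

                cache.Clear();
                cache[0] = diskHeader;
                return true;
            }
        }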
Example #15
        private static Dictionary <uint, string> RecoveryCollectionPages(LiteEngine engine, HeaderPage header, StringBuilder log)
        {
            var result = new Dictionary <uint, string>();

            // get collection page
            foreach (var col in header.CollectionPages)
            {
                CollectionPage colPage = null;

                try
                {
                    // read collection page
                    var buffer = engine._disk.ReadPage(col.Value);
                    var page   = BasePage.ReadPage(buffer);

                    if (page.PageType != PageType.Collection)
                    {
                        continue;
                    }

                    colPage = page as CollectionPage;
                }
                catch (Exception ex)
                {
                    log.AppendLine($"Page {col.Value} (Collection) Error: {ex.Message}");
                    continue;
                }

                // get all pageID from all valid indexes
                var pagesID = new HashSet <uint>(colPage.Indexes.Where(x => x.IsEmpty == false && x.HeadNode.PageID != uint.MaxValue).Select(x => x.HeadNode.PageID));

                // load all dataPages reachable from these initial index pageIDs
                var dataPages = RecoveryDetectCollectionByIndexPages(engine, pagesID, log);

                // populate resultset with this collection name/data page
                foreach (var page in dataPages)
                {
                    result[page] = col.Key;
                }
            }

            return(result);
        }
Example #16
        /// <summary>
        /// Override ReadPage, decrypting the data read from disk
        /// </summary>
        public override byte[] ReadPage(uint pageID)
        {
            var buffer = base.ReadPage(pageID);

            // when reading the header, check the password
            if (pageID == 0)
            {
                // header page will be read twice here (the price of keeping concerns isolated)
                var header = (HeaderPage)BasePage.ReadPage(buffer);

                if (header.DbParams.Password.BinaryCompareTo(_password) != 0)
                {
                    throw LiteException.DatabaseWrongPassword();
                }

                return(buffer);
            }

            return(_crypto.Decrypt(buffer));
        }
Example #17
        /// <summary>
        /// Try to recover data from the current datafile into a new datafile.
        /// </summary>
        public static string Recovery(string filename)
        {
            // if the file does not exist, just exit
            if (!File.Exists(filename))
            {
                return("");
            }

            var log         = new StringBuilder();
            var newfilename = FileHelper.GetTempFile(filename, "-recovery", true);
            var count       = 0;

            using (var olddb = new LiteEngine(filename))
                using (var newdb = new LiteEngine(newfilename, false))
                {
                    // get header from the old database (this must be possible to read)
                    var header = olddb._pager.GetPage <HeaderPage>(0);

                    var collections = RecoveryCollectionPages(olddb, header, log);

                    // try to recover all data pages
                    for (uint i = 1; i < header.LastPageID; i++)
                    {
                        DataPage dataPage = null;

                        try
                        {
                            var buffer = olddb._disk.ReadPage(i);

                            // searching only for DataPage (PageType == 4)
                            if (buffer[4] != 4)
                            {
                                continue;
                            }

                            dataPage = BasePage.ReadPage(buffer) as DataPage;
                        }
                        catch (Exception ex)
                        {
                            log.AppendLine($"Page {i} (DataPage) Error: {ex.Message}");
                            continue;
                        }

                        // try to find the collection name using the pageID map (use a fixed name if not found)
                        if (collections.TryGetValue(i, out var colname) == false)
                        {
                            colname = "_recovery";
                        }

                        foreach (var block in dataPage.DataBlocks)
                        {
                            try
                            {
                                // read bytes
                                var bson = olddb._data.Read(block.Value.Position);

                                // deserialize as document
                                var doc = BsonSerializer.Deserialize(bson);

                                // and insert into new database
                                newdb.Insert(colname, doc);

                                count++;
                            }
                            catch (Exception ex)
                            {
                                log.AppendLine($"Document {block.Value.Position} Error: {ex.Message}");
                                continue;
                            }
                        }
                    }
                }

            log.Insert(0, $"Document recovery count: {count}\n");

            return(log.ToString());
        }
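A hedged usage sketch, assuming this static helper lives on LiteEngine (its access to private fields such as olddb._pager suggests it does). The path is illustrative; salvaged documents end up in a "-recovery" copy next to the original file, as FileHelper.GetTempFile above indicates:

        using System;
        using LiteDB;

        public static class RecoveryDemo
        {
            public static void Run()
            {
                var log = LiteEngine.Recovery(@"C:\data\broken.db");

                Console.WriteLine(log.Length == 0
                    ? "File not found, nothing to recover"
                    : log);
            }
        }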
Example #18
        private static HashSet <uint> RecoveryDetectCollectionByIndexPages(LiteEngine engine, HashSet <uint> initialPagesID, StringBuilder log)
        {
            var indexPages = new Dictionary <uint, bool>();
            var dataPages  = new HashSet <uint>();

            foreach (var pageID in initialPagesID)
            {
                indexPages.Add(pageID, false);
            }

            // discover all index pages related to the current index page (all of them are in the same collection)
            while (indexPages.Count(x => x.Value == false) > 0)
            {
                var item = indexPages.First(x => x.Value == false);

                // mark page as read
                indexPages[item.Key] = true;
                IndexPage indexPage = null;

                try
                {
                    // try to read the page from disk and deserialize it as an IndexPage
                    var buffer = engine._disk.ReadPage(item.Key);
                    var page   = BasePage.ReadPage(buffer);

                    if (page.PageType != PageType.Index)
                    {
                        continue;
                    }

                    indexPage = page as IndexPage;
                }
                catch (Exception ex)
                {
                    log.AppendLine($"Page {item.Key} (Index) Error: {ex.Message}");
                    continue;
                }

                // now, check for all nodes to get dataPages
                foreach (var node in indexPage.Nodes.Values)
                {
                    if (node.DataBlock.PageID != uint.MaxValue)
                    {
                        dataPages.Add(node.DataBlock.PageID);
                    }

                    // add into indexPages all possible indexPages
                    if (!indexPages.ContainsKey(node.PrevNode.PageID) && node.PrevNode.PageID != uint.MaxValue)
                    {
                        indexPages.Add(node.PrevNode.PageID, false);
                    }

                    if (!indexPages.ContainsKey(node.NextNode.PageID) && node.NextNode.PageID != uint.MaxValue)
                    {
                        indexPages.Add(node.NextNode.PageID, false);
                    }

                    foreach (var pos in node.Prev.Where(x => !x.IsEmpty && x.PageID != uint.MaxValue))
                    {
                        if (!indexPages.ContainsKey(pos.PageID))
                        {
                            indexPages.Add(pos.PageID, false);
                        }
                    }

                    foreach (var pos in node.Next.Where(x => !x.IsEmpty && x.PageID != uint.MaxValue))
                    {
                        if (!indexPages.ContainsKey(pos.PageID))
                        {
                            indexPages.Add(pos.PageID, false);
                        }
                    }
                }
            }

            return(dataPages);
        }
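Stripped of page deserialization, the routine is a graph traversal: start from a set of index pages, follow every link until nothing unvisited remains, and collect the referenced data pages. A self-contained sketch of that traversal over an in-memory page graph; PageWalk and the dictionary shape are illustrative, not LiteDB structures:

        using System.Collections.Generic;

        public static class PageWalk
        {
            // links: index pageID -> (linked index pageIDs, referenced data pageIDs)
            public static HashSet<uint> CollectDataPages(
                Dictionary<uint, (uint[] IndexLinks, uint[] DataPages)> links,
                IEnumerable<uint> initialPages)
            {
                var visited   = new HashSet<uint>();
                var dataPages = new HashSet<uint>();
                var queue     = new Queue<uint>(initialPages);

                while (queue.Count > 0)
                {
                    var pageID = queue.Dequeue();

                    if (!visited.Add(pageID)) continue;                      // already processed
                    if (!links.TryGetValue(pageID, out var page)) continue;  // unreadable page: skip, like the catch block above

                    foreach (var data in page.DataPages) dataPages.Add(data);
                    foreach (var next in page.IndexLinks) queue.Enqueue(next);
                }

                return dataPages;
            }
        }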