Example #1
        /// <summary>
        /// Read page bytes from disk
        /// </summary>
        public virtual byte[] ReadPage(uint pageID)
        {
            // if stream is not initialized but the header page is requested, create a new header
            if (_stream == null && pageID == 0)
            {
                var header = new HeaderPage
                {
                    LastPageID = 1
                };

                return(header.WritePage());
            }
            else if (_stream == null)
            {
                this.InternalInitialize();
            }

            var buffer   = new byte[BasePage.PAGE_SIZE];
            var position = BasePage.GetSizeOfPages(pageID);

            // position cursor
            if (_stream.Position != position)
            {
                _stream.Seek(position, SeekOrigin.Begin);
            }

            // read bytes from data file
            _stream.Read(buffer, 0, BasePage.PAGE_SIZE);

            return(buffer);
        }
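A hedged usage sketch (the caller code is hypothetical; 'diskService' names an instance of the class above): reading page 0 before the stream is initialized returns a freshly rendered HeaderPage buffer without touching the disk.

        var headerBytes = diskService.ReadPage(0);          // served from a new HeaderPage, no file I/O
        var pageID = BitConverter.ToUInt32(headerBytes, 0); // first 4 bytes hold the pageID (see Example #3)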
Example #2
        /// <summary>
        /// List all indexes inside a collection
        /// </summary>
        public IEnumerable <IndexInfo> GetIndexes(string colName, bool stats = false)
        {
            // transaction is disposed as soon as the caller finishes enumerating
            using (var trans = _transaction.Begin(true))
            {
                var col = this.GetCollectionPage(colName, false);

                if (col == null)
                {
                    yield break;
                }

                foreach (var index in col.GetIndexes(true))
                {
                    var info = new IndexInfo(index);

                    if (stats)
                    {
                        _cache.CheckPoint();

                        var pages = _indexer.FindAll(index, Query.Ascending).GroupBy(x => x.Page.PageID).Count();

                        // this command can consume too much memory!! there is no CheckPoint inside the loop
                        var keySize = pages == 0 ? 0 : _indexer.FindAll(index, Query.Ascending).Average(x => x.KeyLength);

                        info.Stats                = new IndexInfo.IndexStats();
                        info.Stats.Pages          = pages;
                        info.Stats.Allocated      = BasePage.GetSizeOfPages(pages);
                        info.Stats.KeyAverageSize = (int)keySize;
                    }

                    yield return(info);
                }
            }
        }
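Note on Example #2: because C# iterator methods execute lazily, _transaction.Begin(true) only runs when the caller starts enumerating, and the transaction is disposed when enumeration finishes or the enumerator is disposed early, not literally when the IEnumerable goes out of scope.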
Example #3
        private void Recovery(FileStream journal)
        {
            var fileSize = _stream.Length;
            var buffer = new byte[BasePage.PAGE_SIZE];

            journal.Seek(0, SeekOrigin.Begin);

            while (journal.Position < journal.Length)
            {
                // read page bytes from journal file
                journal.Read(buffer, 0, BasePage.PAGE_SIZE);

                // read pageID (first 4 bytes)
                var pageID = BitConverter.ToUInt32(buffer, 0);

                _log.Write(Logger.RECOVERY, "recover page #{0:0000}", pageID);

                // if header page, read it fully (to get the original file size)
                if (pageID == 0)
                {
                    var header = (HeaderPage)BasePage.ReadPage(buffer);

                    fileSize = BasePage.GetSizeOfPages(header.LastPageID + 1);
                }

                // write in stream
                this.WritePage(pageID, buffer);
            }

            _log.Write(Logger.RECOVERY, "resize datafile to {0} bytes", fileSize);

            // shrink the datafile if it grew beyond its original size before the rollback
            _stream.SetLength(fileSize);
        }
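A hedged sketch of how this private Recovery method might be driven when the datafile is opened; the guard method is an assumption (not shown in the source), but _journalFilename mirrors the field set up in the FileDiskService constructor (Example #15).

        private void TryRecovery()
        {
            // nothing to do when no journal file was left behind
            if (File.Exists(_journalFilename) == false) return;

            // open the journal read-only and replay every page into the datafile
            using (var journal = new FileStream(_journalFilename,
                FileMode.Open, FileAccess.Read, FileShare.ReadWrite, BasePage.PAGE_SIZE))
            {
                this.Recovery(journal);
            }

            // the datafile is restored, so the journal can be discarded
            File.Delete(_journalFilename);
        }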
Example #4
        /// <summary>
        /// Read page bytes from disk
        /// </summary>
        public virtual byte[] ReadPage(uint pageID)
        {
            var buffer   = new byte[BasePage.PAGE_SIZE];
            var position = BasePage.GetSizeOfPages(pageID);

            // position cursor
            if (_stream.Position != position)
            {
                _stream.Seek(position, SeekOrigin.Begin);
            }

            // read bytes from data file
            _stream.Read(buffer, 0, BasePage.PAGE_SIZE);

            _log.Write(Logger.DISK, "read page #{0:0000} :: {1}", pageID, (PageType)buffer[PAGE_TYPE_POSITION]);

            // when reading the header, check the password
            if (pageID == 0)
            {
                // I know, the header page will be read twice (it's the price of isolated concerns)
                var header = (HeaderPage)BasePage.ReadPage(buffer);
                ValidatePassword(header.Password);
            }

            return(buffer);
        }
Example #5
        /// <summary>
        /// Save all dirty pages to disk
        /// </summary>
        public void Commit()
        {
            // get header page
            var header = _pager.GetPage <HeaderPage>(0);

            // set final datafile length (optimize page writes)
            _disk.SetLength(BasePage.GetSizeOfPages(header.LastPageID + 1));

            // write all dirty pages in data file
            foreach (var page in _pager.GetDirtyPages())
            {
                // first, write the page's original data to the journal file
                _disk.WriteJournal(page.PageID, page.DiskData);

                // then write the new changed pages to the datafile
                // page.WritePage() updates DiskData with the newly rendered buffer
                _disk.WritePage(page.PageID, _crypto == null || page.PageID == 0 ? page.WritePage() : _crypto.Encrypt(page.WritePage()));

                // mark page as clean (it is now saved on disk)
                page.IsDirty = false;
            }

            // discard journal file
            _disk.ClearJournal();
        }
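The ordering inside the loop above is the crash-safety contract: each page's original bytes reach the journal before the new bytes overwrite them in the datafile, which is exactly what lets Recovery (Examples #3 and #6) restore the pre-commit state after an interrupted commit.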
Example #6
        /// <summary>
        /// Try to recover from the journal file (if it exists), restoring the original datafile.
        /// The journal file is NOT encrypted (even when the datafile is encrypted)
        /// </summary>
        public void Recovery()
        {
            var fileSize = _disk.FileLength;

            // read all journal pages
            foreach (var buffer in _disk.ReadJournal())
            {
                // read pageID (first 4 bytes)
                var pageID = BitConverter.ToUInt32(buffer, 0);

                _log.Write(Logger.RECOVERY, "recover page #{0:0000}", pageID);

                // if header page, read it fully (to get the original file size)
                if (pageID == 0)
                {
                    var header = (HeaderPage)BasePage.ReadPage(buffer);

                    fileSize = BasePage.GetSizeOfPages(header.LastPageID + 1);
                }

                // write in stream (encrypt if datafile is encrypted)
                _disk.WritePage(pageID, _crypto == null || pageID == 0 ? buffer : _crypto.Encrypt(buffer));
            }

            _log.Write(Logger.RECOVERY, "resize datafile to {0} bytes", fileSize);

            // shrink the datafile if it grew beyond its original size before the rollback
            _disk.SetLength(fileSize);

            // empty journal file
            _disk.ClearJournal();
        }
Example #7
        /// <summary>
        /// Returns the size in bytes of the specified number of pages
        /// </summary>
        /// <param name="pageCount">The page count</param>
        public static long GetSizeOfPages(int pageCount)
        {
            if (pageCount < 0)
            {
                throw new ArgumentOutOfRangeException("pageCount", "Cannot be less than 0.");
            }

            return(BasePage.GetSizeOfPages((uint)pageCount));
        }
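The uint overload that this method delegates to is not shown here; a minimal sketch, assuming PAGE_SIZE is the fixed page size in bytes used throughout these examples:

        public static long GetSizeOfPages(uint pageCount)
        {
            // every page occupies exactly PAGE_SIZE bytes, so the total size is a
            // plain multiplication (widened to long before multiplying to avoid overflow)
            return checked((long)pageCount * BasePage.PAGE_SIZE);
        }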
Example #8
        /// <summary>
        /// Persist single page bytes to disk
        /// </summary>
        public void WritePage(uint pageID, byte[] buffer)
        {
            var position = BasePage.GetSizeOfPages(pageID);

            // position cursor
            if (_stream.Position != position)
            {
                _stream.Seek(position, SeekOrigin.Begin);
            }

            _stream.Write(buffer, 0, BasePage.PAGE_SIZE);
        }
Example #9
        /// <summary>
        /// Persist single page bytes to disk
        /// </summary>
        public virtual void WritePage(uint pageID, byte[] buffer)
        {
            if (_stream == null) this.InternalInitialize();

            var position = BasePage.GetSizeOfPages(pageID);

            // position cursor
            if (_stream.Position != position)
            {
                _stream.Seek(position, SeekOrigin.Begin);
            }

            _stream.Write(buffer, 0, BasePage.PAGE_SIZE);
        }
Example #10
        /// <summary>
        /// Persist single page bytes to disk
        /// </summary>
        public virtual void WritePage(uint pageID, byte[] buffer)
        {
            var position = BasePage.GetSizeOfPages(pageID);

            _log.Write(Logger.DISK, "write page #{0:0000} :: {1}", pageID, (PageType)buffer[PAGE_TYPE_POSITION]);

            // position cursor
            if (_stream.Position != position)
            {
                _stream.Seek(position, SeekOrigin.Begin);
            }

            _stream.Write(buffer, 0, BasePage.PAGE_SIZE);
        }
Example #11
        /// <summary>
        /// Read page bytes from disk
        /// </summary>
        public byte[] ReadPage(uint pageID)
        {
            var buffer   = new byte[BasePage.PAGE_SIZE];
            var position = BasePage.GetSizeOfPages(pageID);

            // position cursor
            if (_stream.Position != position)
            {
                _stream.Seek(position, SeekOrigin.Begin);
            }

            // read bytes from data file
            _stream.Read(buffer, 0, BasePage.PAGE_SIZE);

            return(buffer);
        }
Example #12
        /// <summary>
        ///     Save all dirty pages to disk - does not touch the disk lock
        /// </summary>
        private void Save()
        {
            // get header and mark as dirty
            var header = _pager.GetPage <HeaderPage>(0, true);

            // increase file changeID (wraps back to 0 on overflow)
            header.ChangeID = header.ChangeID == ushort.MaxValue ? (ushort)0 : (ushort)(header.ChangeID + 1);

            // set final datafile length (optimize page writes)
            _disk.SetLength(BasePage.GetSizeOfPages(header.LastPageID + 1));

            // write all dirty pages in data file
            foreach (var page in _cache.GetDirtyPages())
            {
                _disk.WritePage(page.PageID, page.WritePage());
            }
        }
Example #13
        /// <summary>
        /// Read page bytes from disk
        /// </summary>
        public virtual byte[] ReadPage(uint pageID)
        {
            var buffer   = new byte[BasePage.PAGE_SIZE];
            var position = BasePage.GetSizeOfPages(pageID);

            // position cursor
            if (_stream.Position != position)
            {
                _stream.Seek(position, SeekOrigin.Begin);
            }

            // read bytes from data file
            _stream.Read(buffer, 0, BasePage.PAGE_SIZE);

            _log.Write(Logger.DISK, "read page #{0:0000} :: {1}", pageID, (PageType)buffer[PAGE_TYPE_POSITION]);

            return(buffer);
        }
Example #14
        /// <summary>
        /// Save all dirty pages to disk
        /// </summary>
        public void Commit()
        {
            // get header page
            var header = _pager.GetPage <HeaderPage>(0);

            // increase file changeID (wraps back to 0 on overflow)
            header.ChangeID = header.ChangeID == ushort.MaxValue ? (ushort)0 : (ushort)(header.ChangeID + (ushort)1);

            // mark header as dirty
            _pager.SetDirty(header);

            // write journal file
            _disk.WriteJournal(_cache.GetDirtyPages()
                               .Select(x => x.DiskData)
                               .Where(x => x.Length > 0)
                               .ToList());

            // enter in exclusive lock mode to write on disk
            using (_locker.Exclusive())
            {
                // set final datafile length (optimize page writes)
                _disk.SetLength(BasePage.GetSizeOfPages(header.LastPageID + 1));

                foreach (var page in _cache.GetDirtyPages())
                {
                    // page.WritePage() updates DiskData with the newly rendered buffer
                    var buffer = _crypto == null || page.PageID == 0 ?
                                 page.WritePage() :
                                 _crypto.Encrypt(page.WritePage());

                    _disk.WritePage(page.PageID, buffer);
                }

                // mark all dirty pages as clean (all are persisted on disk and are valid pages)
                _cache.MarkDirtyAsClean();

                // ensure all pages in the OS cache have been persisted to the medium
                _disk.Flush();

                // discard journal file
                _disk.ClearJournal();
            }
        }
Example #15
        public FileDiskService(ConnectionString conn, Logger log)
        {
            _filename = conn.GetValue("filename", "");
            var journalEnabled = conn.GetValue("journal", true);

            _timeout     = conn.GetValue("timeout", new TimeSpan(0, 1, 0));
            _readonly    = conn.GetValue("readonly", false);
            _initialSize = conn.GetFileSize("initial size", 0);
            _limitSize   = conn.GetFileSize("limit size", 0);
            var level = conn.GetValue <byte?>("log", null);

            // simple validations
            if (_filename.IsNullOrWhiteSpace())
            {
                throw new ArgumentNullException("filename");
            }
            if (_initialSize > 0 && _initialSize < BasePage.GetSizeOfPages(10))
            {
                throw new ArgumentException("initial size too low");
            }
            if (_limitSize > 0 && _limitSize < BasePage.GetSizeOfPages(10))
            {
                throw new ArgumentException("limit size too low");
            }
            if (_initialSize > 0 && _limitSize > 0 && _initialSize > _limitSize)
            {
                throw new ArgumentException("limit size less than initial size");
            }

            // setup log + log-level
            _log = log;
            if (level.HasValue)
            {
                _log.Level = level.Value;
            }

            _journalEnabled  = _readonly ? false : journalEnabled; // readonly? no journal
            _journalFilename = Path.Combine(Path.GetDirectoryName(_filename),
                                            Path.GetFileNameWithoutExtension(_filename) + "-journal" + Path.GetExtension(_filename));
            _tempFilename = Path.Combine(Path.GetDirectoryName(_filename),
                                         Path.GetFileNameWithoutExtension(_filename) + "-temp" + Path.GetExtension(_filename));
        }
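A hedged construction sketch; the connection-string keys match the GetValue/GetFileSize calls above, and the values are purely illustrative:

        var conn = new ConnectionString("filename=app.db; journal=true; timeout=00:01:00");
        var disk = new FileDiskService(conn, new Logger());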
Example #16
        /// <summary>
        /// Get internal information about the database
        /// </summary>
        public BsonDocument Info()
        {
            using (_locker.Read())
            {
                var header      = _pager.GetPage <HeaderPage>(0);
                var collections = new BsonArray();

                foreach (var colName in header.CollectionPages.Keys)
                {
                    var col = this.GetCollectionPage(colName, false);

                    var colDoc = new BsonDocument
                    {
                        { "name", col.CollectionName },
                        { "pageID", (double)col.PageID },
                        { "count", col.DocumentCount },
                        { "sequence", col.Sequence },
                        { "indexes", new BsonArray(
                              col.Indexes.Where(x => !x.IsEmpty).Select(i => new BsonDocument
                            {
                                { "slot", i.Slot },
                                { "field", i.Field },
                                { "expression", i.Expression },
                                { "unique", i.Unique }
                            })) }
                    };

                    collections.Add(colDoc);
                }

                return(new BsonDocument
                {
                    { "userVersion", (int)header.UserVersion },
                    { "encrypted", header.Password.Any(x => x > 0) },
                    { "changeID", (int)header.ChangeID },
                    { "lastPageID", (int)header.LastPageID },
                    { "fileSize", BasePage.GetSizeOfPages(header.LastPageID + 1) },
                    { "collections", collections }
                });
            }
        }
Example #17
        /// <summary>
        /// Get stats from a collection
        /// </summary>
        public CollectionInfo Stats(string colName)
        {
            return(this.ReadTransaction <CollectionInfo>(colName, (col) =>
            {
                if (col == null)
                {
                    return null;
                }

                int indexPages, indexFree, dataPages, extendPages, dataFree, docSize;

                this.Usage(col, out indexPages, out indexFree, out dataPages, out extendPages, out dataFree, out docSize);

                return new CollectionInfo
                {
                    Name = colName,
                    DocumentsCount = (int)col.DocumentCount,
                    DocumentAverageSize = (int)((float)docSize / col.DocumentCount),
                    Indexes = this.GetIndexes(colName, true).ToList(),
                    TotalPages = indexPages + dataPages + extendPages + 1,
                    TotalAllocated = BasePage.GetSizeOfPages(indexPages + dataPages + extendPages + 1),
                    TotalFree = indexFree + dataFree,
                    Pages = new Dictionary <string, int>()
                    {
                        { "Index", indexPages },
                        { "Data", dataPages },
                        { "Extend", extendPages }
                    },
                    Allocated = new Dictionary <string, long>()
                    {
                        { "Index", BasePage.GetSizeOfPages(indexPages) },
                        { "Data", BasePage.GetSizeOfPages(dataPages + extendPages) }
                    },
                    Free = new Dictionary <string, long>()
                    {
                        { "Index", indexFree },
                        { "Data", dataFree }
                    }
                };
            }));
        }
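The + 1 added to TotalPages and TotalAllocated above is presumably the collection page itself, which is counted by none of the index, data, or extend page tallies.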
Example #18
        /// <summary>
        /// List all indexes inside a collection
        /// </summary>
        public IEnumerable <BsonDocument> GetIndexes(string colName, bool stats = false)
        {
            var col = this.GetCollectionPage(colName, false);

            if (col == null)
            {
                yield break;
            }

            foreach (var index in col.GetIndexes(true))
            {
                var doc = new BsonDocument()
                          .Add("slot", index.Slot)
                          .Add("field", index.Field)
                          .Add("options", new BsonDocument()
                               .Add("unique", index.Options.Unique)
                               .Add("ignoreCase", index.Options.IgnoreCase)
                               .Add("removeAccents", index.Options.RemoveAccents)
                               .Add("trimWhitespace", index.Options.TrimWhitespace)
                               .Add("emptyStringToNull", index.Options.EmptyStringToNull)
                               );

                if (stats)
                {
                    _cache.CheckPoint();

                    var pages = _indexer.FindAll(index, Query.Ascending).GroupBy(x => x.Page.PageID).Count();

                    // this command can consume too much memory!! there is no CheckPoint inside the loop
                    var keySize = pages == 0 ? 0 : _indexer.FindAll(index, Query.Ascending).Average(x => x.KeyLength);

                    doc.Add("stats", new BsonDocument()
                            .Add("pages", pages)
                            .Add("allocated", BasePage.GetSizeOfPages(pages))
                            .Add("keyAverageSize", (int)keySize)
                            );
                }

                yield return(doc);
            }
        }
Example #19
        /// <summary>
        /// Read journal file returning IEnumerable of pages
        /// </summary>
        public IEnumerable <byte[]> ReadJournal(uint lastPageID)
        {
            // if the journal is not enabled, just return an empty result
            if (_options.Journal == false)
            {
                yield break;
            }

            // position the stream at the beginning of the journal area
            _stream.Seek(BasePage.GetSizeOfPages(lastPageID + 1), SeekOrigin.Begin);

            var buffer = new byte[BasePage.PAGE_SIZE];
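            // note: the same buffer instance is reused for every yield below, so
            // the consumer must use (or copy) each page before calling MoveNext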

            while (_stream.Position < _stream.Length)
            {
                // read page bytes from journal file
                _stream.Read(buffer, 0, BasePage.PAGE_SIZE);

                yield return(buffer);
            }
        }
Example #20
        /// <summary>
        ///     Get stats from a collection
        /// </summary>
        public BsonValue Stats(string colName)
        {
            var col = GetCollectionPage(colName, false);

            if (col == null)
            {
                return(BsonValue.Null);
            }

            int indexPages, indexFree, dataPages, extendPages, dataFree, docSize;

            lock (_locker)
            {
                Usage(col, out indexPages, out indexFree, out dataPages, out extendPages, out dataFree, out docSize);
            }

            return(new BsonDocument()
                   .Add("name", colName)
                   .Add("documents", (int)col.DocumentCount)
                   .Add("documentAverageSize", (int)((float)docSize / col.DocumentCount))
                   .Add("indexes", new BsonArray(GetIndexes(colName, true)))
                   .Add("pages", new BsonDocument()
                        .Add("index", indexPages)
                        .Add("data", dataPages)
                        .Add("extend", extendPages)
                        .Add("total", indexPages + dataPages + extendPages + 1)
                        )
                   .Add("usage", new BsonDocument()
                        .Add("allocated", new BsonDocument()
                             .Add("index", BasePage.GetSizeOfPages(indexPages))
                             .Add("data", BasePage.GetSizeOfPages(dataPages + extendPages))
                             .Add("total", BasePage.GetSizeOfPages(indexPages + dataPages + extendPages + 1))
                             )
                        .Add("free", new BsonDocument()
                             .Add("index", indexFree)
                             .Add("data", dataFree)
                             .Add("total", indexFree + dataFree)
                             )
                        ));
        }
Example #21
        /// <summary>
        /// Read journal file returning IEnumerable of pages
        /// </summary>
        public IEnumerable <byte[]> ReadJournal(uint lastPageID)
        {
            // position the stream at the beginning of the journal area
            var pos = BasePage.GetSizeOfPages(lastPageID + 1);

            _stream.Seek(pos, SeekOrigin.Begin);

            var buffer = new byte[BasePage.PAGE_SIZE];
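            // note: the same buffer instance is reused for every yield below, so
            // the consumer must use (or copy) each page before calling MoveNext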

            while (_stream.Position < _stream.Length)
            {
                // read page bytes from journal file
                _stream.Read(buffer, 0, BasePage.PAGE_SIZE);

                yield return(buffer);

                // reposition to the next journal page (the consumer may have moved the stream)
                pos += BasePage.PAGE_SIZE;

                _stream.Seek(pos, SeekOrigin.Begin);
            }
        }
Example #22
        /// <summary>
        /// Write original page bytes to the journal file (in sequence) - if the journal does not exist, create it.
        /// </summary>
        public void WriteJournal(ICollection <byte[]> pages, uint lastPageID)
        {
            // write journal only if enabled
            if (_options.Journal == false)
            {
                return;
            }

            var size = BasePage.GetSizeOfPages(lastPageID + 1) +
                       BasePage.GetSizeOfPages(pages.Count);

            _log.Write(Logger.JOURNAL, "extend datafile to journal - {0} pages", pages.Count);

            // set journal file length before write
            _stream.SetLength(size);

            // go to initial file position (after lastPageID)
            _stream.Seek(BasePage.GetSizeOfPages(lastPageID + 1), SeekOrigin.Begin);

            foreach (var buffer in pages)
            {
                // read pageID and pageType from buffer
                var pageID   = BitConverter.ToUInt32(buffer, 0);
                var pageType = (PageType)buffer[PAGE_TYPE_POSITION];

                _log.Write(Logger.JOURNAL, "write page #{0:0000} :: {1}", pageID, pageType);

                // write page bytes
                _stream.Write(buffer, 0, BasePage.PAGE_SIZE);
            }

            _log.Write(Logger.JOURNAL, "flush journal to disk");

            // ensure all data is persisted on disk
            this.Flush();
        }
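Note that in this variant the journal is not a separate file: it lives inside the datafile itself, appended right after the last data page (at offset GetSizeOfPages(lastPageID + 1)). ClearJournal in Example #24 removes it by simply truncating the file back to that offset.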
Example #23
        /// <summary>
        /// Copy the database to another disk
        /// </summary>
        public long Shrink()
        {
            // begin a write exclusive access
            using (var trans = _transaction.Begin(false))
            {
                try
                {
                    // create a temporary disk
                    var tempDisk = _disk.GetTempDisk();

                    // get initial disk size
                    var header = _pager.GetPage <HeaderPage>(0);
                    var diff   = 0L;

                    // create temp engine instance to copy all documents
                    using (var tempEngine = new DbEngine(tempDisk, new Logger()))
                    {
                        tempDisk.Open(false);

                        // read all collections
                        foreach (var col in _collections.GetAll())
                        {
                            // first copy all indexes
                            foreach (var index in col.GetIndexes(false))
                            {
                                tempEngine.EnsureIndex(col.CollectionName, index.Field, index.Options);
                            }

                            // then, read all documents and copy to new engine
                            var nodes = _indexer.FindAll(col.PK, Query.Ascending);

                            tempEngine.Insert(col.CollectionName,
                                              nodes.Select(node => BsonSerializer.Deserialize(_data.Read(node.DataBlock))));
                        }

                        // get final header from temp engine
                        var tempHeader = tempEngine._pager.GetPage <HeaderPage>(0, true);

                        // copy info from initial header to final header
                        tempHeader.ChangeID = header.ChangeID;

                        // let's create the journal file before re-writing
                        for (uint pageID = 0; pageID <= header.LastPageID; pageID++)
                        {
                            _disk.WriteJournal(pageID, _disk.ReadPage(pageID));
                        }

                        // commit journal + shrink data file
                        _disk.SetLength(BasePage.GetSizeOfPages(tempHeader.LastPageID + 1));

                        // let's re-write all pages, copying from the new database
                        for (uint pageID = 0; pageID <= tempHeader.LastPageID; pageID++)
                        {
                            _disk.WritePage(pageID, tempDisk.ReadPage(pageID));
                        }

                        // now delete journal
                        _disk.DeleteJournal();

                        // get the size difference between the initial and final last pageID
                        diff = BasePage.GetSizeOfPages(header.LastPageID - tempHeader.LastPageID);

                        tempDisk.Close();
                    }

                    // unlock disk and clear cache to continue
                    trans.Commit();

                    // delete temporary disk
                    _disk.DeleteTempDisk();

                    return(diff);
                }
                catch (Exception ex)
                {
                    _log.Write(Logger.ERROR, ex.Message);
                    trans.Rollback();
                    throw;
                }
            }
        }
Example #24
        /// <summary>
        /// Shrink datafile to crop journal area
        /// </summary>
        public void ClearJournal(uint lastPageID)
        {
            _log.Write(Logger.JOURNAL, "shrink datafile to remove journal area");

            this.SetLength(BasePage.GetSizeOfPages(lastPageID + 1));
        }
Example #25
        /// <summary>
        /// Save all dirty pages to disk
        /// </summary>
        public void PersistDirtyPages()
        {
            // get header page
            var header = _pager.GetPage <HeaderPage>(0);

            // increase file changeID (wraps back to 0 on overflow)
            header.ChangeID = header.ChangeID == ushort.MaxValue ? (ushort)0 : (ushort)(header.ChangeID + (ushort)1);

            // mark header as dirty
            _pager.SetDirty(header);

            _log.Write(Logger.DISK, "begin disk operations - changeID: {0}", header.ChangeID);

            // write journal file in descending order so the header is the last page on disk
            if (_disk.IsJournalEnabled)
            {
                _disk.WriteJournal(_cache.GetDirtyPages()
                                   .OrderByDescending(x => x.PageID)
                                   .Select(x => x.DiskData)
                                   .Where(x => x.Length > 0)
                                   .ToList(), header.LastPageID);

                // mark header as in recovery before writing starts (in the journal copy, recovery must remain false)
                header.Recovery = true;
            }
            else
            {
                // if there is no journal, resize the file here up-front for faster writes
                _disk.SetLength(BasePage.GetSizeOfPages(header.LastPageID + 1));
            }

            // get all dirty pages starting from the header page (SortedList)
            // the header page (id=0) must always be the first page written to disk because it marks the disk as "in recovery"
            foreach (var page in _cache.GetDirtyPages())
            {
                // page.WritePage() updates DiskData with the newly rendered buffer
                var buffer = _crypto == null || page.PageID == 0 ?
                             page.WritePage() :
                             _crypto.Encrypt(page.WritePage());

                _disk.WritePage(page.PageID, buffer);
            }

            if (_disk.IsJournalEnabled)
            {
                // re-write header page but now with recovery=false
                header.Recovery = false;

                _log.Write(Logger.DISK, "re-write header page now with recovery = false");

                _disk.WritePage(0, header.WritePage());
            }

            // mark all dirty pages as clean (all are persisted on disk and are valid pages)
            _cache.MarkDirtyAsClean();

            // flush all data directly to disk
            _disk.Flush();

            // discard journal file
            _disk.ClearJournal(header.LastPageID);
        }
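For completeness, a hedged sketch of the startup counterpart implied by the Recovery flag (the method name is hypothetical; the ReadPage/cast pattern follows Examples #4 and #6):

        private void CheckRecoveryFlag()
        {
            // page 0 is never encrypted, so it can be read and parsed directly
            var header = (HeaderPage)BasePage.ReadPage(_disk.ReadPage(0));

            if (header.Recovery)
            {
                _log.Write(Logger.RECOVERY, "recovery flag set - replaying journal");

                // Example #6 restores the original pages from the journal
                this.Recovery();
            }
        }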