Example #1
0
        /// <summary>
        /// Drop a collection - remove all data pages + indexes pages
        /// </summary>
        public void Drop(CollectionPage col)
        {
            // collect every page id that must be deleted
            var pagesToDelete = new HashSet<uint>();

            // walk all indexes (PK included) gathering data pages and index pages
            foreach (var index in col.GetIndexes(true))
            {
                foreach (var node in _indexer.FindAll(index, Query.Ascending))
                {
                    // only the PK index (slot 0) points to data pages
                    if (index.Slot == 0)
                    {
                        pagesToDelete.Add(node.DataBlock.PageID);

                        // read the data block to find extended (overflow) pages,
                        // which are deleted immediately rather than collected
                        var dataBlock = _data.GetBlock(node.DataBlock);

                        if (dataBlock.ExtendPageID != uint.MaxValue)
                        {
                            _pager.DeletePage(dataBlock.ExtendPageID, true);
                        }
                    }

                    // keep cache memory bounded while scanning
                    _trans.CheckPoint();

                    // the index node's own page must go too
                    pagesToDelete.Add(node.Position.PageID);
                }

                // head/tail sentinel nodes live on pages of their own
                pagesToDelete.Add(index.HeadNode.PageID);
                pagesToDelete.Add(index.TailNode.PageID);
            }

            // delete every collected page, checkpointing as we go
            foreach (var pageID in pagesToDelete)
            {
                _pager.DeletePage(pageID);

                _trans.CheckPoint();
            }

            // unlink the collection from the header page's collection list
            var header = _pager.GetPage<HeaderPage>(0);

            header.CollectionPages.Remove(col.CollectionName);

            // header changed - mark it dirty so it gets persisted
            _pager.SetDirty(header);

            // finally remove the collection page itself
            _pager.DeletePage(col.PageID);
        }
Example #2
0
        /// <summary>
        /// Encapsulate all operations in a single write transaction
        /// </summary>
        private T Transaction <T>(string collection, bool addIfNotExists, Func <CollectionPage, T> action)
        {
            // every write operation runs under the database write lock
            using (_locker.Write())
            {
                try
                {
                    var collectionPage = this.GetCollectionPage(collection, addIfNotExists);

                    var result = action(collectionPage);

                    // flush dirty pages to disk, then release cache memory
                    _trans.PersistDirtyPages();
                    _trans.CheckPoint();

                    return result;
                }
                catch (Exception ex)
                {
                    _log.Write(Logger.ERROR, ex.Message);

                    // rollback: discard dirty pages so the datafile stays consistent
                    _cache.DiscardDirtyPages();

                    throw;
                }
            }
        }
Example #3
0
        /// <summary>
        /// Read up to _bufferSize index keys from the current node enumerator,
        /// honoring the skip/limit counters. Sets HasMore = false when the
        /// enumeration (or the limit) is exhausted; on a full buffer, records
        /// how many keys to skip on the next run.
        /// </summary>
        public IEnumerable <BsonValue> GetIndexKeys(TransactionService trans, Logger log)
        {
            var index = _bufferSize;

            while (index > 0)
            {
                // memory checkpoint - release cache if needed
                trans.CheckPoint();

                // read next node
                this.HasMore = this.Nodes.MoveNext();

                // FIX: exhaustion must be checked immediately after MoveNext,
                // before the skip branch - the old order kept looping on a dead
                // enumerator, burning _skip decrements (and CheckPoint calls)
                // on nodes that do not exist. Matches the sibling Fetch /
                // GetDocuments methods, which all check HasMore first.
                if (this.HasMore == false)
                {
                    yield break;
                }

                // skip N nodes
                if (--_skip >= 0)
                {
                    continue;
                }

                // and limit in N max
                if (--_limit <= -1)
                {
                    this.HasMore = false;
                    yield break;
                }

                index--;

                // increment counter key
                _counter++;

                // avoid lock again just to check limit
                if (_limit == 0)
                {
                    this.HasMore = false;
                }

                yield return(this.Nodes.Current.Key);
            }

            // for next run, must skip counter because do continue after last
            _skip = _counter + _initialSkip;
        }
Example #4
0
        /// <summary>
        /// Fetch documents from enumerator and add to buffer. If cache recycle, stop read to execute in another read
        /// </summary>
        public void FetchCacheIDs(TransactionService trans, DataService data, BsonReader bsonReader)
        {
            // start from an empty buffer
            this.Clean();

            // keep reading until the cache recycles (CheckPoint() returns true)
            while (!trans.CheckPoint())
            {
                // advance the node enumerator; stop when exhausted
                this.HasMore = _nodes.MoveNext();

                if (!this.HasMore)
                {
                    return;
                }

                var current = _nodes.Current;

                // deserialize the document stored at this node's data block
                var bytes    = data.Read(current.DataBlock);
                var document = bsonReader.Deserialize(bytes).AsDocument;

                // full-scan mode: documents failing the filter are not buffered
                if (_query.UseFilter && _query.FilterDocument(document) == false)
                {
                    continue;
                }

                // advance the position cursor
                _position++;

                // first document wins when ids collide
                string id = document[_LITEDB_CONST.FIELD_ID].AsString;

                if (this.CacheIDs.ContainsKey(id) == false)
                {
                    this.CacheIDs.Add(id, document);
                }
            }
        }
Example #5
0
        /// <summary>
        /// Fetch documents from enumerator and add to buffer. If cache recycle, stop read to execute in another read
        /// </summary>
        public void Fetch(TransactionService trans, DataService data, BsonReader bsonReader)
        {
            // start from an empty buffer
            this.Documents.Clear();

            // keep reading until the cache recycles (CheckPoint() returns true)
            while (!trans.CheckPoint())
            {
                // advance the node enumerator; stop when exhausted
                this.HasMore = _nodes.MoveNext();

                if (!this.HasMore)
                {
                    return;
                }

                // index-only queries: apply skip/limit before deserializing
                if (_query.UseIndex && _query.UseFilter == false)
                {
                    if (--_skip >= 0)
                    {
                        continue;
                    }

                    if (--_limit <= -1)
                    {
                        this.HasMore = false;
                        return;
                    }
                }

                var current = _nodes.Current;

                // deserialize the document stored at this node's data block
                var bytes    = data.Read(current.DataBlock);
                var document = bsonReader.Deserialize(bytes).AsDocument;

                // full-scan mode: filter first, then apply skip/limit
                if (_query.UseFilter)
                {
                    if (_query.FilterDocument(document) == false)
                    {
                        continue;
                    }

                    if (--_skip >= 0)
                    {
                        continue;
                    }

                    if (--_limit <= -1)
                    {
                        this.HasMore = false;
                        return;
                    }
                }

                // advance the position cursor
                _position++;

                // limit just reached: flag no-more so caller avoids re-locking
                if (_limit == 0)
                {
                    this.HasMore = false;
                }

                this.Documents.Add(document);
            }
        }
Example #6
0
        /// <summary>
        /// Read up to _bufferSize documents from the node enumerator, applying
        /// skip/limit either before deserialization (index-only queries) or
        /// after the filter (full-scan queries).
        /// </summary>
        public IEnumerable <BsonDocument> GetDocuments(TransactionService trans, DataService data, Logger log)
        {
            var remaining = _bufferSize;

            while (remaining > 0)
            {
                // memory checkpoint - release cache if it grew too much
                trans.CheckPoint();

                // advance the node enumerator; stop when exhausted
                this.HasMore = this.Nodes.MoveNext();

                if (!this.HasMore)
                {
                    yield break;
                }

                // index-only queries: apply skip/limit before deserializing
                if (_query.UseIndex && _query.UseFilter == false)
                {
                    if (--_skip >= 0)
                    {
                        continue;
                    }

                    if (--_limit <= -1)
                    {
                        this.HasMore = false;
                        yield break;
                    }
                }

                var node = this.Nodes.Current;

                // deserialize the document stored at this node's data block
                var bytes    = data.Read(node.DataBlock);
                var document = BsonSerializer.Deserialize(bytes).AsDocument;

                // full-scan mode: filter first, then apply skip/limit
                if (_query.UseFilter)
                {
                    if (_query.FilterDocument(document) == false)
                    {
                        continue;
                    }

                    if (--_skip >= 0)
                    {
                        continue;
                    }

                    if (--_limit <= -1)
                    {
                        this.HasMore = false;
                        yield break;
                    }
                }

                remaining--;

                // count documents yielded so far (used to rebuild _skip below)
                _counter++;

                // limit just reached: flag no-more so caller avoids re-locking
                if (_limit == 0)
                {
                    this.HasMore = false;
                }

                yield return(document);
            }

            // for next run, must skip counter because do continue after last
            _skip = _counter + _initialSkip;
        }
Example #7
0
        /// <summary>
        /// Stream documents from the node enumerator until the cache recycles,
        /// applying skip/limit either before deserialization (index-only
        /// queries) or after the filter (full-scan queries).
        /// </summary>
        public IEnumerable <BsonDocument> GetDocuments(TransactionService trans, DataService data, Logger log)
        {
            if (_skip > 0)
            {
                log.Write(Logger.QUERY, "skiping {0} documents", _skip);
            }

            // keep reading until the cache recycles (CheckPoint() returns true)
            while (trans.CheckPoint() == false)
            {
                // advance the node enumerator; stop when exhausted
                this.HasMore = this.Nodes.MoveNext();

                if (!this.HasMore)
                {
                    yield break;
                }

                // index-only queries: apply skip/limit before deserializing
                if (_query.UseIndex && _query.UseFilter == false)
                {
                    if (--_skip >= 0)
                    {
                        continue;
                    }

                    if (--_limit <= -1)
                    {
                        this.HasMore = false;
                        yield break;
                    }
                }

                var node = this.Nodes.Current;

                // deserialize the document stored at this node's data block
                var bytes    = data.Read(node.DataBlock);
                var document = BsonSerializer.Deserialize(bytes).AsDocument;

                // full-scan mode: filter first, then apply skip/limit
                if (_query.UseFilter)
                {
                    if (_query.FilterDocument(document) == false)
                    {
                        continue;
                    }

                    if (--_skip >= 0)
                    {
                        continue;
                    }

                    if (--_limit <= -1)
                    {
                        this.HasMore = false;
                        yield break;
                    }
                }

                // advance the position cursor
                _position++;

                // limit just reached: flag no-more so caller avoids re-locking
                if (_limit == 0)
                {
                    this.HasMore = false;
                }

                yield return(document);
            }
        }