Example No. 1
        /// <summary>
        /// Deletes an item from the source data provider
        /// </summary>
        private static void RecycleItem(IItemData itemData, ISourceDataStore sourceStore, Action<IItemData> deleteMessage)
        {
            var children = sourceStore.GetChildren(itemData);

            RecycleItems(children, sourceStore, deleteMessage);

            deleteMessage(itemData);

            sourceStore.Remove(itemData);
        }
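Example No. 1 calls a RecycleItems overload that is not shown in this listing. A minimal sketch of what that helper presumably looks like, assuming it simply fans the recursion out over each child so the subtree is removed bottom-up (the signature here is a guess, not code from the examples):

        // Hypothetical companion helper (not shown in these examples): recurse into each
        // child so every descendant is recycled before its parent is removed.
        private static void RecycleItems(IEnumerable<IItemData> items, ISourceDataStore sourceStore, Action<IItemData> deleteMessage)
        {
            foreach (var item in items)
            {
                RecycleItem(item, sourceStore, deleteMessage);
            }
        }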
Example No. 2
        /// <summary>
        /// Deletes an item from the source data provider
        /// </summary>
        protected virtual void RecycleItem(IItemData itemData)
        {
            var children = _sourceDataStore.GetChildren(itemData);

            EvaluateOrphans(children.ToArray());

            _logger.RecycledItem(itemData);
            _logger.Evaluated(itemData);
            _sourceDataStore.Remove(itemData);
        }
Example No. 3
        protected virtual void DumpTreeRecursive(IItemData root, IPredicate predicate, ITargetDataStore serializationStore, ISourceDataStore sourceDataStore, ILogger logger)
        {
            var dump = DumpItemInternal(root, predicate, serializationStore);

            if (dump.IsIncluded)
            {
                foreach (var child in sourceDataStore.GetChildren(root))
                {
                    DumpTreeRecursive(child, predicate, serializationStore, sourceDataStore, logger);
                }
            }
            else
            {
                logger.Warn("[S] {0} because {1}".FormatWith(root.GetDisplayIdentifier(), dump.Justification));
            }
        }
Example No. 4
        protected virtual void DumpTreeInternal(IItemData root, IPredicate predicate, ITargetDataStore serializationStore, ISourceDataStore sourceDataStore, ILogger logger, IUnicornDataProviderConfiguration dpConfig)
        {
            if (dpConfig.EnableTransparentSync)
            {
                CacheManager.ClearAllCaches();
                // BOOM! This clears all caches before we begin;
                // because for a TpSync configuration we could have TpSync items in the data cache which 'taint' the reserialize
                // from being purely database
            }

            // we throw items into this queue and process whatever is available; only the children of queued items are processed, not the item itself
            var processQueue = new Queue<IItemData>();

            using (new UnicornOperationContext())
            {
                var rootResult = DumpItemInternal(root, predicate, serializationStore);
                if (!rootResult.IsIncluded)
                {
                    return;
                }
            }

            processQueue.Enqueue(root);

            IItemData parentItem;

            while (processQueue.Count > 0)
            {
                parentItem = processQueue.Dequeue();

                using (new UnicornOperationContext()) // disablers only work on the current thread. So we need to disable on all worker threads
                {
                    var children = sourceDataStore.GetChildren(parentItem);

                    foreach (var item in children)
                    {
                        // we dump each child of the dequeued item
                        // we do a whole array of children at a time because this makes the serialization of all children of a given item single threaded
                        // this gives us a deterministic result of naming when name collisions occur, which means trees will not contain random differences
                        // when reserialized (oh joy, that)
                        var dump = DumpItemInternal(item, predicate, serializationStore);
                        if (dump.IsIncluded)
                        {
                            // if the item is included, then we add its children as a queued work item
                            processQueue.Enqueue(item);
                        }
                        else
                        {
                            logger.Warn("[S] {0} because {1}".FormatWith(item.GetDisplayIdentifier(), dump.Justification));
                        }
                    }
                }
            }

            if (dpConfig.EnableTransparentSync)
            {
                CacheManager.ClearAllCaches();
                // BOOM! And we clear everything again at the end, because now
                // for a TpSync configuration we might have DATABASE items in cache where we want TpSync.
            }
        }
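The queue-based loop above is a plain iterative (breadth-first) traversal: an item's children are only visited when the item itself was included. A stand-alone sketch of the same pattern, assuming System and System.Collections.Generic usings; Node, Include and Serialize are placeholders that collapse what DumpItemInternal does in the real code, not Unicorn types:

        // Stand-alone illustration of the queue-driven traversal above.
        class Node
        {
            public string Name;
            public List<Node> Children = new List<Node>();
        }

        static bool Include(Node node) { return true; }                     // stands in for the predicate decision (IsIncluded)
        static void Serialize(Node node) { Console.WriteLine(node.Name); }  // stands in for writing the item to the target data store

        static void DumpTree(Node root)
        {
            if (!Include(root)) return;            // an excluded root means nothing is written at all
            Serialize(root);

            var processQueue = new Queue<Node>();
            processQueue.Enqueue(root);

            while (processQueue.Count > 0)
            {
                var parent = processQueue.Dequeue();

                foreach (var child in parent.Children)
                {
                    if (!Include(child)) continue; // excluding an item prunes its whole subtree
                    Serialize(child);
                    processQueue.Enqueue(child);   // only included items have their own children visited
                }
            }
        }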
Example No. 5
        protected virtual void DumpTreeInternal(IItemData root, IPredicate predicate, ITargetDataStore serializationStore, ISourceDataStore sourceDataStore, ILogger logger)
        {
            CacheManager.ClearAllCaches(); // BOOM! This clears all caches before we begin;
            // because for a TpSync configuration we could have TpSync items in the data cache which 'taint' the reserialize
            // from being purely database

            // we throw items into this queue, and let a thread pool pick up anything available to process in parallel. only the children of queued items are processed, not the item itself
            ConcurrentQueue<IItemData> processQueue = new ConcurrentQueue<IItemData>();

            // exceptions thrown on background threads are left in here
            ConcurrentQueue<Exception> errors = new ConcurrentQueue<Exception>();

            // we keep track of how many threads are actively processing something so we know when to end the threads
            // (e.g. a thread could have nothing in the queue right now, but that's because a different thread is about
            // to add 8 things to the queue - so it shouldn't quit till all is done)
            int activeThreads = 0;

            using (new UnicornOperationContext())
            {
                var rootResult = DumpItemInternal(root, predicate, serializationStore);
                if (!rootResult.IsIncluded)
                {
                    return;
                }
            }

            processQueue.Enqueue(root);

            Thread[] pool = Enumerable.Range(0, ThreadCount).Select(i => new Thread(() =>
            {
                Process:
                Interlocked.Increment(ref activeThreads);
                IItemData parentItem;

                while (processQueue.TryDequeue(out parentItem) && errors.Count == 0)
                {
                    using (new UnicornOperationContext()) // disablers only work on the current thread. So we need to disable on all worker threads
                    {
                        var children = sourceDataStore.GetChildren(parentItem);

                        foreach (var item in children)
                        {
                            try
                            {
                                // we dump each child of the dequeued item
                                // we do a whole array of children at a time because this makes the serialization of all children of a given item single threaded
                                // this gives us a deterministic result of naming when name collisions occur, which means trees will not contain random differences
                                // when reserialized (oh joy, that)
                                var dump = DumpItemInternal(item, predicate, serializationStore);
                                if (dump.IsIncluded)
                                {
                                    // if the item is included, then we add its children as a queued work item
                                    processQueue.Enqueue(item);
                                }
                                else
                                {
                                    logger.Warn("[S] {0} because {1}".FormatWith(item.GetDisplayIdentifier(), dump.Justification));
                                }
                            }
                            catch (Exception ex)
                            {
                                errors.Enqueue(ex);
                                break;
                            }
                        }
                    }
                }

                // if we get here, the queue was empty. let's make ourselves inactive.
                Interlocked.Decrement(ref activeThreads);

                // if some other thread in our pool was doing stuff, sleep for a sec to see if we can pick up their work
                if (activeThreads > 0)
                {
                    Thread.Sleep(10);
                    goto Process; // OH MY GOD :)
                }
            })).ToArray();

            // start the thread pool
            foreach (var thread in pool)
            {
                thread.Start();
            }

            // ...and then wait for all the threads to finish
            foreach (var thread in pool)
            {
                thread.Join();
            }

            CacheManager.ClearAllCaches(); // BOOM! And we clear everything again at the end, because now
            // for a TpSync configuration we might have DATABASE items in cache where we want TpSync.

            if (errors.Count > 0)
            {
                throw new AggregateException(errors);
            }
        }
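The worker-pool idiom in this example (a shared ConcurrentQueue, an Interlocked counter of active threads, and idle threads sleeping briefly before re-checking the queue) can be shown in isolation. A minimal sketch, assuming the usual System, System.Collections.Concurrent, System.Collections.Generic, System.Linq and System.Threading usings; ProcessInParallel and processItem are illustrative names, not Unicorn code:

        // Stand-alone sketch of the same worker-pool idiom: workers drain a shared queue,
        // and an Interlocked counter keeps idle workers alive while any other worker might
        // still enqueue follow-up work.
        static void ProcessInParallel(IEnumerable<int> seeds, int threadCount, Action<int, ConcurrentQueue<int>> processItem)
        {
            var queue = new ConcurrentQueue<int>(seeds);
            var errors = new ConcurrentQueue<Exception>();
            int activeThreads = 0;

            Thread[] pool = Enumerable.Range(0, threadCount).Select(i => new Thread(() =>
            {
                Process:
                Interlocked.Increment(ref activeThreads);

                int item;
                while (queue.TryDequeue(out item) && errors.Count == 0)
                {
                    try
                    {
                        processItem(item, queue); // the callback may enqueue follow-up work items
                    }
                    catch (Exception ex)
                    {
                        errors.Enqueue(ex);
                    }
                }

                Interlocked.Decrement(ref activeThreads);

                // another worker may be about to enqueue more items; wait briefly and retry
                if (activeThreads > 0)
                {
                    Thread.Sleep(10);
                    goto Process;
                }
            })).ToArray();

            foreach (var thread in pool) thread.Start();
            foreach (var thread in pool) thread.Join();

            if (errors.Count > 0) throw new AggregateException(errors);
        }

The counter, not queue emptiness alone, decides when a worker may exit: a momentarily empty queue only ends the pool once no other worker is still busy and able to enqueue more children.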
Example No. 6
        protected virtual void DumpTreeInternal(IItemData root, IPredicate predicate, ITargetDataStore serializationStore, ISourceDataStore sourceDataStore, ILogger logger)
        {
            // we throw items into this queue, and let a thread pool pick up anything available to process in parallel. only the children of queued items are processed, not the item itself
            ConcurrentQueue<IItemData> processQueue = new ConcurrentQueue<IItemData>();

            // exceptions thrown on background threads are left in here
            ConcurrentQueue<Exception> errors = new ConcurrentQueue<Exception>();

            // we keep track of how many threads are actively processing something so we know when to end the threads
            // (e.g. a thread could have nothing in the queue right now, but that's because a different thread is about
            // to add 8 things to the queue - so it shouldn't quit till all is done)
            int activeThreads = 0;

            var rootResult = DumpItemInternal(root, predicate, serializationStore);
            if (!rootResult.IsIncluded) return;

            processQueue.Enqueue(root);

            Thread[] pool = Enumerable.Range(0, ThreadCount).Select(i => new Thread(() =>
            {
                Process:
                Interlocked.Increment(ref activeThreads);
                IItemData parentItem;

                while (processQueue.TryDequeue(out parentItem) && errors.Count == 0)
                {
                    using (new SecurityDisabler())
                    {
                        var children = sourceDataStore.GetChildren(parentItem);

                        foreach (var item in children)
                        {
                            try
                            {
                                // we dump each child of the dequeued item
                                // we do a whole array of children at a time because this makes the serialization of all children of a given item single threaded
                                // this gives us a deterministic result of naming when name collisions occur, which means trees will not contain random differences
                                // when reserialized (oh joy, that)
                                var dump = DumpItemInternal(item, predicate, serializationStore);
                                if (dump.IsIncluded)
                                {
                                    // if the item is included, then we add its children as a queued work item
                                    processQueue.Enqueue(item);
                                }
                                else
                                {
                                    logger.Warn("[S] {0} because {1}".FormatWith(item.GetDisplayIdentifier(), dump.Justification));
                                }
                            }
                            catch (Exception ex)
                            {
                                errors.Enqueue(ex);
                                break;
                            }
                        }
                    }
                }

                // if we get here, the queue was empty. let's make ourselves inactive.
                Interlocked.Decrement(ref activeThreads);

                // if some other thread in our pool was doing stuff, sleep for a sec to see if we can pick up their work
                if (activeThreads > 0)
                {
                    Thread.Sleep(10);
                    goto Process; // OH MY GOD :)
                }
            })).ToArray();

            // start the thread pool
            foreach (var thread in pool) thread.Start();

            // ...and then wait for all the threads to finish
            foreach (var thread in pool) thread.Join();

            if (errors.Count > 0) throw new AggregateException(errors);
        }