Example #1
        public static void Decompress(BinaryReader reader, BinaryWriter writer, int jobCount, int chunkSize = Constants.DEFAULT_CHUNK_SIZE,
                                      CancellationToken cancellationToken = null, Loggers loggers = null)
        {
            Guard.NotNull(reader, nameof(reader));
            Guard.NotNull(writer, nameof(writer));
            Guard.NotZeroOrNegative(jobCount, nameof(jobCount));
            Guard.NotZeroOrNegative(chunkSize, nameof(chunkSize));

            Console.WriteLine("Decompress as Enumerable");

            IDefaultLogger defaultLogger = loggers?.DefaultLogger;
            IChunkLogger   chunkLogger   = loggers?.ChunkLogger;
            IJobLogger     jobLogger     = loggers?.JobLogger;

            ChunkSource.ReadHeader(reader, out int chunkCount);

            // Read compressed chunks and unzip them on jobCount parallel workers,
            // consuming the results lazily as an enumerable.
            var chunks = ChunkSource.ReadChunkCompressed(reader, chunkCount)
                         .AsParallel(jobCount)
                         .Do(x => chunkLogger?.LogChunk("Read", x))
                         .Map(ChunkConverter.Unzip)
                         .Do(x => chunkLogger?.LogChunk("Proc", x))
                         .AsEnumerable(cancellationToken, jobLogger);

            int index = 0;

            foreach (var chunk in chunks)
            {
                ChunkTarget.WriteChunk(chunk, writer, chunkSize);

                chunkLogger?.LogChunk("Write", chunk);
                defaultLogger?.LogChunksProcessed(++index, chunkCount);
            }
        }
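
A minimal usage sketch for the enumerable variant above, assuming the method lives on a static class (called GZipper here, a hypothetical name) and using placeholder file names. Only reader, writer and jobCount are supplied, so the chunk size, cancellation token and loggers keep their defaults.

using System;
using System.IO;

class Program
{
    static void Main()
    {
        // Open the compressed input and the decompressed output.
        using (var reader = new BinaryReader(File.OpenRead("data.bin.gz")))
        using (var writer = new BinaryWriter(File.Create("data.bin")))
        {
            // One worker per core; remaining parameters use their defaults.
            GZipper.Decompress(reader, writer, jobCount: Environment.ProcessorCount);
        }
    }
}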
Example #2
        public static void Decompress(BinaryReader reader, BinaryWriter writer, int jobCount, int chunkSize = Constants.DEFAULT_CHUNK_SIZE,
                                      CancellationToken cancellationToken = null, Loggers loggers = null, ProfilingType profilingType = ProfilingType.None)
        {
            Guard.NotNull(reader, nameof(reader));
            Guard.NotNull(writer, nameof(writer));
            Guard.NotZeroOrNegative(jobCount, nameof(jobCount));
            Guard.NotZeroOrNegative(chunkSize, nameof(chunkSize));

            Console.WriteLine("Decompress as Pipeline");

            int index = 0;

            IDefaultLogger defaultLogger = loggers?.DefaultLogger;

            ChunkSource.ReadHeader(reader, out int chunkCount);

            var chunkEnumerator = ChunkSource.ReadChunkCompressed(reader, chunkCount).GetEnumerator();

            // Three-stage pipeline: pull compressed chunks from the reader,
            // unzip them on jobCount parallel workers, then write them out.
            Pipeline<Chunk>
            .FromSource("source", (out Chunk chunk) => {
                bool next = chunkEnumerator.MoveNext();
                chunk     = next ? chunkEnumerator.Current : null;
                return next;
            })
            .PipeMany("zip", ChunkConverter.Unzip, jobCount)
            .ToTarget("target", (Chunk chunk) => {
                ChunkTarget.WriteChunk(chunk, writer, chunkSize);

                if (profilingType == ProfilingType.None)
                {
                    defaultLogger?.LogChunksProcessed(++index, chunkCount);
                }
            })
            .Run(cancellationToken, profilingType);
        }
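
For comparison, a self-contained sketch of the same source → parallel transform → target shape built on System.Collections.Concurrent.BlockingCollection. This is not the project's Pipeline implementation, only an illustration of the pattern; unlike a real decompressor, it makes no effort to preserve item order across the parallel stage.

using System;
using System.Collections.Concurrent;
using System.Linq;
using System.Threading.Tasks;

class PipelineSketch
{
    static void Main()
    {
        var input  = new BlockingCollection<int>(boundedCapacity: 16);
        var output = new BlockingCollection<int>(boundedCapacity: 16);

        // Source stage: produce items until the source is exhausted.
        var source = Task.Run(() =>
        {
            for (int i = 0; i < 100; i++) input.Add(i);
            input.CompleteAdding();
        });

        // Parallel transform stage, analogous to PipeMany("zip", ..., jobCount).
        int jobCount = Environment.ProcessorCount;
        var workers = Enumerable.Range(0, jobCount).Select(_ => Task.Run(() =>
        {
            foreach (var item in input.GetConsumingEnumerable())
                output.Add(item * item); // stand-in for the real transform
        })).ToArray();

        // Close the output once every worker has finished.
        var closer = Task.WhenAll(workers).ContinueWith(_ => output.CompleteAdding());

        // Target stage: consume results on the calling thread, like ToTarget.
        foreach (var result in output.GetConsumingEnumerable())
            Console.WriteLine(result);

        Task.WaitAll(source, closer);
    }
}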
Example #3
 public ContentItemInstaller()
 {
     _logger      = Bootstrap.Container.Resolve<IDefaultLogger>();
     _scDatastore = Bootstrap.Container.Resolve<IDataStore>(_logger);
     _sitecore    = Bootstrap.Container.Resolve<ISitecoreDataAccessService>();
     _jsonSerializationService = Bootstrap.Container.Resolve<IJsonSerializationService>();
     _comparer = Bootstrap.Container.Resolve<IItemComparer>();
 }
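
The constructor above takes its dependencies from the Bootstrap.Container service locator. For comparison, below is a sketch of the same class taking those dependencies through constructor injection instead; the interface and field names are copied from the snippet, but this is not the project's actual code and the interfaces are assumed to be defined elsewhere in it.

public class ContentItemInstaller
{
    private readonly IDefaultLogger _logger;
    private readonly IDataStore _scDatastore;
    private readonly ISitecoreDataAccessService _sitecore;
    private readonly IJsonSerializationService _jsonSerializationService;
    private readonly IItemComparer _comparer;

    // Dependencies arrive through the constructor (typically wired by a DI
    // container at composition time) rather than being resolved from a
    // global service locator inside the constructor body.
    public ContentItemInstaller(
        IDefaultLogger logger,
        IDataStore scDatastore,
        ISitecoreDataAccessService sitecore,
        IJsonSerializationService jsonSerializationService,
        IItemComparer comparer)
    {
        _logger      = logger;
        _scDatastore = scDatastore;
        _sitecore    = sitecore;
        _jsonSerializationService = jsonSerializationService;
        _comparer    = comparer;
    }
}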