/// <summary>
/// Reads the compressed stream header, then decompresses every chunk through a
/// multi-worker pipeline (source -> unzip workers -> writer target).
/// </summary>
/// <param name="reader">Source of the compressed data; must not be null.</param>
/// <param name="writer">Destination for the decompressed data; must not be null.</param>
/// <param name="jobCount">Number of parallel unzip workers; must be positive.</param>
/// <param name="chunkSize">Chunk size used when writing output; must be positive.</param>
/// <param name="cancellationToken">Token that cancels the pipeline run.</param>
/// <param name="loggers">Optional logger bundle; only the default logger is used here.</param>
/// <param name="profilingType">When not <c>None</c>, per-chunk progress logging is suppressed.</param>
public static void Decompress(BinaryReader reader, BinaryWriter writer, int jobCount, int chunkSize = Constants.DEFAULT_CHUNK_SIZE,
                              CancellationToken cancellationToken = default, Loggers loggers = null, ProfilingType profilingType = ProfilingType.None)
{
    Guard.NotNull(reader, nameof(reader));
    Guard.NotNull(writer, nameof(writer));
    Guard.NotZeroOrNegative(jobCount, nameof(jobCount));
    Guard.NotZeroOrNegative(chunkSize, nameof(chunkSize));

    Console.WriteLine("Decompress as Pipeline");

    int index = 0;

    IDefaultLogger defaultLogger = loggers?.DefaultLogger;

    // The header carries the number of compressed chunks that follow.
    ChunkSource.ReadHeader(reader, out int chunkCount);

    var chunkEnumerator = ChunkSource.ReadChunkCompressed(reader, chunkCount).GetEnumerator();

    Pipeline<Chunk>
        .FromSource("source", (out Chunk chunk) =>
        {
            // Pull the next compressed chunk; a null chunk signals end-of-stream.
            bool next = chunkEnumerator.MoveNext();
            chunk = next ? chunkEnumerator.Current : null;
            return next;
        })
        .PipeMany("zip", ChunkConverter.Unzip, jobCount)
        .ToTarget("target", (Chunk chunk) =>
        {
            ChunkTarget.WriteChunk(chunk, writer, chunkSize);

            // Progress logging would distort profiling measurements, so it is
            // emitted only when profiling is disabled.
            if (profilingType == ProfilingType.None)
            {
                defaultLogger?.LogChunksProcessed(++index, chunkCount);
            }
        })
        .Run(cancellationToken, profilingType);
}
        public static void Decompress(BinaryReader reader, BinaryWriter writer, int jobCount, int chunkSize = Constants.DEFAULT_CHUNK_SIZE,
                                      CancellationToken cancellationToken = null, Loggers loggers = null)
        {
            Guard.NotNull(reader, nameof(reader));
            Guard.NotNull(writer, nameof(writer));
            Guard.NotZeroOrNegative(jobCount, nameof(jobCount));
            Guard.NotZeroOrNegative(chunkSize, nameof(chunkSize));

            Console.WriteLine("Decompress as Enumerable");

            IDefaultLogger defaultLogger = loggers?.DefaultLogger;
            IChunkLogger   chunkLogger   = loggers?.ChunkLogger;
            IJobLogger     jobLogger     = loggers?.JobLogger;

            ChunkSource.ReadHeader(reader, out int chunkCount);

            var chunks = ChunkSource.ReadChunkCompressed(reader, chunkCount)
                         .AsParallel(jobCount)
                         .Do(x => chunkLogger?.LogChunk("Read", x))
                         .Map(ChunkConverter.Unzip)
                         .Do(x => chunkLogger?.LogChunk("Proc", x))
                         .AsEnumerable(cancellationToken, jobLogger);

            int index = 0;

            foreach (var chunk in chunks)
            {
                ChunkTarget.WriteChunk(chunk, writer, chunkSize);

                chunkLogger?.LogChunk("Write", chunk);
                defaultLogger?.LogChunksProcessed(++index, chunkCount);
            }
        }
Example #3
0
 /// <summary>
 /// Builds a world around the given chunk source, wiring up entity handling,
 /// lighting, chunk caching, and the active chunk pool.
 /// </summary>
 /// <param name="source">Backing chunk source handed to the chunk cache.</param>
 public World(ChunkSource source)
 {
     // NOTE(review): presumably a simulation speed multiplier — confirm units.
     TimeFactor = 0.02;
     EntityHandler = new EntityHandler(this);
     // Lighting must exist before the generator below, which depends on it.
     Lighting = new LightingEngine(this);
     ChunkCache = new ChunkCache(source, new FlatChunkGenerator(Lighting));
     // The pool wraps the cache, so the cache must be created first.
     ChunkPool = new ActiveChunkPool(this, ChunkCache);
 }
Example #4
0
 /// <summary>
 /// Creates a cache that serves chunks from <paramref name="source"/> and
 /// falls back to <paramref name="generator"/> for chunks it cannot supply.
 /// </summary>
 /// <param name="source">Provider of existing chunks.</param>
 /// <param name="generator">Generator used to create missing chunks.</param>
 public ChunkCache(ChunkSource source, ChunkGenerator generator)
 {
     // Plain field capture; the two assignments are independent.
     this.generator = generator;
     this.source = source;
 }
Example #5
0
 /// <summary>
 /// Creates a cache backed by the given chunk source and chunk generator.
 /// </summary>
 /// <param name="source">Provider of existing chunks.</param>
 /// <param name="generator">Generator used when a chunk is not available from the source.</param>
 public ChunkCache(ChunkSource source, ChunkGenerator generator)
 {
     this.source = source;
     this.generator = generator;
 }