Example #1
        private List<CharacterModel> LoadCharacterModels(bool unique)
        {
            if (_cache.TryLoadCache(out var cached))
            {
                var validCached = cached.Where(x => x.UniqueCharacter == unique).ToList();
                if (validCached.Any())
                {
                    return validCached;
                }
            }
            else
            {
                cached = new List<CharacterModel>();
            }

            var files = Prefetch.Load().Files.Keys;

            var modelBag = new ConcurrentBag<CharacterModel>();

            var tasks = new ParallelTasks<string>(
                Environment.ProcessorCount, file =>
            {
                if (IsValid(file, unique))
                {
                    foreach (var model in GetCharacterModels(file))
                    {
                        modelBag.Add(model);
                    }
                }
            });

            tasks.Start();
            tasks.AddItems(files);
            tasks.WaitForComplete();

            GC.Collect();

            var modelList = modelBag.ToList();

            cached.AddRange(modelList);
            _cache.Save(cached);

            return modelList;
        }
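
The snippet above drives ParallelTasks<T> through a construct → Start → AddItems → WaitForComplete lifecycle. Below is a stripped-down sketch of that pattern, assuming only the constructor shape the snippet itself shows (a worker count plus a per-item callback); the helper name and the line-measuring work are illustrative, and it needs the usual System.Collections.Concurrent and System.Linq usings.

        // Minimal sketch of the ParallelTasks<T> lifecycle, assuming the constructor
        // takes (workerCount, perItemCallback) as the snippet above suggests.
        private static List<int> MeasureLinesInParallel(IEnumerable<string> lines)
        {
            var results = new ConcurrentBag<int>();

            var tasks = new ParallelTasks<string>(
                Environment.ProcessorCount, line =>
            {
                // The callback runs on worker threads, so only touch thread-safe state here.
                results.Add(line.Length);
            });

            tasks.Start();           // spin up the workers
            tasks.AddItems(lines);   // queue the items to process
            tasks.WaitForComplete(); // block until the queue drains

            return results.ToList();
        }
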
        private List<T> LoadInternal()
        {
            if (_cache.TryLoadCache(out var cached))
            {
                if (cached.Any())
                {
                    return cached;
                }
            }
            else
            {
                cached = new List<T>();
            }

            var files = Prefetch.Load().Files.Keys;
            var bag   = new ConcurrentBag<T>();

            var parallels = IoC.Debug.SingleThread ? 1 : Environment.ProcessorCount;
            var tasks     = new ParallelTasks<string>(
                parallels, file =>
            {
                if (_fileNameValidator(file) && !Ignored.Any(x => x.IsMatch(file)))
                {
                    foreach (var item in _itemGetter(file))
                    {
                        bag.Add(item);
                    }
                }
            });

            tasks.Start();
            tasks.AddItems(files);
            tasks.WaitForComplete();

            GC.Collect();

            var itemList = bag.ToList();

            cached.AddRange(itemList);
            _cache.Save(cached);

            return itemList;
        }
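
LoadInternal leans on three members the snippet never declares: _fileNameValidator, _itemGetter, and Ignored. The sketch below is a hedged guess at their shapes, inferred purely from how they are called (IsMatch points at Regex, the other two at Func<> delegates); the field types and the sample pattern are assumptions, not the project's actual declarations.

        // Inferred from usage only; the real declarations are not part of the snippet.
        private readonly Func<string, bool> _fileNameValidator;      // is this file worth parsing?
        private readonly Func<string, IEnumerable<T>> _itemGetter;   // extract items from one file
        private static readonly Regex[] Ignored =                    // skip-list, matched per file
        {
            new Regex(@"\.stream$", RegexOptions.IgnoreCase),        // illustrative pattern
        };
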
        public void BuildFromFileList(string physicalPathRoot, string[] sourceFiles)
        {
            var compressedBlocks = new ConcurrentBag<CompressBlock>();

            var tasks = new ParallelTasks<CompressBlock>(
                Environment.ProcessorCount, block =>
            {
                block.CompressBuffer = new byte[BlockSize * 2];

                // Compress
                long compressedSize = Utility.OodleLZ.Compress(
                    block.DataBuffer.AsSpan(),
                    (int)block.DecompressedSize,
                    block.CompressBuffer.AsSpan());

                if (compressedSize == -1)
                {
                    throw new Exception("Buffer compression failed");
                }

                block.Size       = (uint)compressedSize;
                block.DataBuffer = null;

                compressedBlocks.Add(block);
            });

            tasks.Start();

            AddFiles(physicalPathRoot, sourceFiles, tasks);

            tasks.WaitForComplete();

            using var fs            = File.Open(_archivePath, _allowOverwrite ? FileMode.Create : FileMode.CreateNew, FileAccess.ReadWrite, FileShare.None);
            using var archiveWriter = new BinaryWriter(fs, Encoding.UTF8, true);

            archiveWriter.BaseStream.Position = CalculateArchiveHeaderLength(sourceFiles.Length, compressedBlocks.Count);
            WriteBlockEntries(archiveWriter, compressedBlocks);

            archiveWriter.BaseStream.Position = 0;
            WriteArchiveHeaders(archiveWriter);
        }
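
The compression callback above gives each block a scratch buffer of BlockSize * 2 bytes so the Oodle call has headroom even when the data does not compress, then nulls DataBuffer so the raw input can be collected. The fields it touches suggest roughly the following shape for CompressBlock; this is inferred from the call sites, not the actual declaration, and the real type likely carries more (offsets, per-block hashes, etc.).

        // Approximate shape of CompressBlock, inferred from how the worker callback uses it.
        public class CompressBlock
        {
            public byte[] DataBuffer;     // raw input, dropped once compressed
            public long DecompressedSize; // valid byte count in DataBuffer
            public byte[] CompressBuffer; // output buffer, sized BlockSize * 2
            public uint Size;             // compressed size returned by OodleLZ.Compress
        }
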
        /// <summary>
        /// Generate an archive from a set of file paths on disk.
        /// </summary>
        /// <param name="baseDirectoryRoot">Base directory to look for files</param>
        /// <param name="sourceCorePaths">List of files in Decima core path format. <see cref="baseDirectoryRoot"/>
        /// is prepended to each element.</param>
        public void BuildFromFileList(string baseDirectoryRoot, IEnumerable<string> sourceCorePaths)
        {
            var compressedBlocks = new ConcurrentBag<CompressBlock>();

            var tasks = new ParallelTasks<CompressBlock>(
                Environment.ProcessorCount, block =>
            {
                block.CompressBuffer = new byte[BlockSize * 2];

                // Compress
                long compressedSize = HZDCoreEditor.Util.OodleLZ.Compress(
                    block.DataBuffer.AsSpan(),
                    (int)block.DecompressedSize,
                    block.CompressBuffer.AsSpan());

                if (compressedSize == -1)
                {
                    throw new Exception("Buffer compression failed");
                }

                block.Size       = (uint)compressedSize;
                block.DataBuffer = null;

                compressedBlocks.Add(block);
            });

            tasks.Start();

            AddFiles(baseDirectoryRoot, sourceCorePaths, tasks);

            tasks.WaitForComplete();

            using var fs            = File.Open(_archivePath, _fileMode, FileAccess.ReadWrite, FileShare.None);
            using var archiveWriter = new BinaryWriter(fs, Encoding.UTF8, true);

            archiveWriter.BaseStream.Position = CalculateArchiveHeaderLength(sourceCorePaths.Count(), compressedBlocks.Count);
            WriteBlockEntries(archiveWriter, compressedBlocks);

            archiveWriter.BaseStream.Position = 0;
            WriteArchiveHeaders(archiveWriter);
        }
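
A possible call site for the documented overload follows. The snippets show only the archive builder's methods, so the PackfileWriter name, its constructor arguments, and the core paths below are illustrative assumptions; only BuildFromFileList itself comes from the code above.

        // Illustrative only: class name, constructor, and paths are assumptions.
        var corePaths = new[]
        {
            "models/characters/example_character.core",
            "sounds/music/example_track.core",
        };

        var writer = new PackfileWriter(@"D:\mods\Patch_Custom.bin", allowOverwrite: true);
        writer.BuildFromFileList(@"D:\mods\extracted", corePaths);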