Пример #1
0
        public void CacheHeader_EndToEnd()
        {
            // Round-trip a CacheHeader through an in-memory stream and verify that
            // every field survives serialization unchanged.
            const Source         transcriptSource = Source.BothRefSeqAndEnsembl;
            const long           creationTicks    = long.MaxValue;
            const GenomeAssembly assembly         = GenomeAssembly.hg19;
            const ushort         vepVersion       = ushort.MaxValue;

            var baseHeader   = new Header("VEP", 1, 2, transcriptSource, creationTicks, assembly);
            var customHeader = new TranscriptCacheCustomHeader(vepVersion, 0);
            var written      = new CacheHeader(baseHeader, customHeader);

            CacheHeader roundTripped;

            using (var stream = new MemoryStream())
            {
                // leaveOpen: true so the stream survives the writer's disposal and
                // can be rewound for reading.
                using (var writer = new BinaryWriter(stream, Encoding.UTF8, true))
                {
                    written.Write(writer);
                }

                stream.Position = 0;
                roundTripped    = CacheHeader.Read(stream);
            }

            Assert.NotNull(roundTripped);
            Assert.Equal(transcriptSource, roundTripped.Source);
            Assert.Equal(creationTicks, roundTripped.CreationTimeTicks);
            Assert.Equal(assembly, roundTripped.Assembly);
            Assert.Equal(vepVersion, roundTripped.Custom.VepVersion);
        }
Пример #2
0
        /// <summary>
        /// Reads only the header of the cache file at <paramref name="cachePath"/> and
        /// returns its (schema, data, VEP) version triple.
        /// </summary>
        /// <exception cref="InvalidFileFormatException">Thrown when the header cannot be parsed.</exception>
        private static (ushort Schema, ushort Data, ushort Vep) GetHeaderInformation(string cachePath)
        {
            CacheHeader header;
            using (var stream = FileUtilities.GetReadStream(cachePath))
            {
                header = CacheHeader.Read(stream);
            }

            return header == null
                ? throw new InvalidFileFormatException($"Could not parse the header information correctly for {cachePath}")
                : (header.SchemaVersion, header.DataVersion, header.Custom.VepVersion);
        }
Пример #3
0
        /// <summary>
        /// Attempts to load a cached texture entry from <paramref name="path"/>, retrying
        /// while the file is locked by a concurrent writer. Returns false (with zeroed
        /// out-parameters) when the cache is disabled, the file is missing, corrupted,
        /// or remains locked past the retry budget. A corrupted entry is deleted best-effort.
        /// </summary>
        internal static bool Fetch(
            string path,
            out int refScale,
            out Vector2I size,
            out TextureFormat format,
            out Vector2B wrapped,
            out Vector2I padding,
            out Vector2I blockPadding,
            out byte[] data
            )
        {
            // Defaults so every out-parameter is defined on all failure paths.
            refScale     = 0;
            size         = Vector2I.Zero;
            format       = TextureFormat.Color;
            wrapped      = Vector2B.False;
            padding      = Vector2I.Zero;
            blockPadding = Vector2I.Zero;
            data         = null;

            if (Config.Cache.Enabled && File.Exists(path))
            {
                int retries = Config.Cache.LockRetries;

                while (retries-- > 0)
                {
                    // Another thread is still writing this entry: wait and retry.
                    if (SavingMap.TryGetValue(path, out var state) && state != SaveState.Saved)
                    {
                        Thread.Sleep(Config.Cache.LockSleepMS);
                        continue;
                    }

                    // https://stackoverflow.com/questions/1304/how-to-check-for-file-lock
                    // Low 16 bits of the HRESULT: 32 = ERROR_SHARING_VIOLATION,
                    // 33 = ERROR_LOCK_VIOLATION.
                    bool WasLocked(in IOException ex)
                    {
                        var errorCode = Marshal.GetHRForException(ex) & ((1 << 16) - 1);

                        return(errorCode == 32 || errorCode == 33);
                    }

                    try {
                        using (var reader = new BinaryReader(new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read))) {
                            var header = CacheHeader.Read(reader);
                            header.Validate(path);

                            refScale     = header.RefScale;
                            size         = header.Size;
                            format       = header.Format.Value;
                            wrapped      = header.Wrapped;
                            padding      = header.Padding;
                            blockPadding = header.BlockPadding;
                            var dataLength = header.DataLength;
                            var dataHash   = header.DataHash;

                            var remainingSize = reader.BaseStream.Length - reader.BaseStream.Position;
                            if (remainingSize < dataLength)
                            {
                                throw new EndOfStreamException("Cache File is corrupted");
                            }

                            data = new byte[dataLength];

                            // Bulk-read the payload. Read() may return short counts, so loop
                            // until the buffer is full. (Replaces the former per-byte ReadByte
                            // loop over a non-standard range foreach.)
                            int offset = 0;
                            while (offset < data.Length)
                            {
                                int bytesRead = reader.Read(data, offset, data.Length - offset);
                                if (bytesRead <= 0)
                                {
                                    throw new EndOfStreamException("Cache File is corrupted");
                                }
                                offset += bytesRead;
                            }

                            if (data.HashXX() != dataHash)
                            {
                                throw new IOException("Cache File is corrupted");
                            }
                        }
                        return(true);
                    }
                    catch (Exception ex) {
                        switch (ex)
                        {
                        case FileNotFoundException _:
                        case EndOfStreamException _:
                        case IOException iox when !WasLocked(iox):
                        default:
                            // Any non-lock failure: warn, best-effort delete the bad
                            // cache entry, and give up immediately.
                            ex.PrintWarning();
                            try { File.Delete(path); } catch { }
                            return(false);

                        case IOException iox when WasLocked(iox):
                            Debug.TraceLn($"File was locked when trying to load cache file '{path}': {ex.Message} [{retries} retries]");

                            Thread.Sleep(Config.Cache.LockSleepMS);
                            break;
                        }
                    }
                }
            }
            return(false);
        }
Пример #4
0
        /// <summary>
        /// Loads a cache file from <paramref name="input"/>: parses the file and cache
        /// headers, the shared string tables, and every definition record.
        /// </summary>
        /// <param name="input">Stream positioned at the start of the cache file.</param>
        /// <param name="validate">When true, verifies the header CRC and deserializes each
        /// definition from an exact-size slice so over/under-reads are detected.</param>
        /// <returns>A populated <see cref="CacheFile"/>.</returns>
        /// <exception cref="FormatException">Unsupported version, CRC mismatch, or a
        /// definition that did not consume exactly its declared data size.</exception>
        public static CacheFile Load(Stream input, bool validate)
        {
            // Read both fixed-size headers in one buffer so the CRC can later be
            // computed over the exact on-disk bytes.
            var         headerBytes = input.ReadBytes(FileHeader.Size + CacheHeader.Size);
            Endian      endian;
            CacheHeader header;

            using (var data = new MemoryStream(headerBytes, false))
            {
                var fileHeader = FileHeader.Read(data);
                // Only file format version 13 (with a zero Unknown field) is supported.
                if (fileHeader.Version != 13 || fileHeader.Unknown != 0)
                {
                    throw new FormatException();
                }

                endian = fileHeader.Endian;
                header = CacheHeader.Read(data, endian);
            }

            if (validate == true)
            {
                // The stored hash was apparently computed with the hash field itself set
                // to the 0xDEADBEEF sentinel, so patch that back in before recomputing.
                var deadbeefBytes = BitConverter.GetBytes(0xDEADBEEFu);
                Array.Copy(deadbeefBytes, 0, headerBytes, FileHeader.Size + CacheHeader.HashOffset, 4);

                if (CRC32.Compute(headerBytes, 0, headerBytes.Length) != header.HeaderHash)
                {
                    throw new FormatException();
                }
            }

            // One shared string blob; the three tables below are offsets into it.
            var stringBytes = LoadArrayBytes(input, header.StringData, 1, validate);

            string[] names, tweakDBIds, resources;
            using (var data = new MemoryStream(stringBytes, false))
            {
                names      = LoadStrings(input, header.NameStringOffsets, validate, data, endian);
                tweakDBIds = LoadStrings(input, header.TweakDBIdStringOffsets, validate, data, endian);
                resources  = LoadStrings(input, header.ResourceStringOffsets, validate, data, endian);
            }

            var definitionHeaders = LoadArray(input, header.Definitions, DefinitionHeader.Size, validate, DefinitionHeader.Read, endian);
            var definitions       = new Definition[definitionHeaders.Length];

            // Index 0 is deliberately skipped everywhere (see the final Skip(1)); it
            // seems to act as a null sentinel so ParentIndex == 0 yields a null Parent.
            // Pass 1: instantiate every definition from its type tag.
            for (int i = 1; i < definitionHeaders.Length; i++)
            {
                definitions[i] = DefinitionFactory.Create(definitionHeaders[i].Type);
            }
            // Pass 2: wire up parents and names — requires all instances to exist first,
            // since a parent may appear after its child in the table.
            for (int i = 1; i < definitionHeaders.Length; i++)
            {
                var definitionHeader = definitionHeaders[i];
                var definition       = definitions[i];
                definition.Parent = definitions[definitionHeader.ParentIndex];
                definition.Name   = names[definitionHeader.NameIndex];
            }
            var reader = new DefinitionReader(input, endian, definitions, names, tweakDBIds, resources);

            // Pass 3: deserialize each definition's payload from its recorded offset.
            for (int i = 1; i < definitionHeaders.Length; i++)
            {
                var definitionHeader = definitionHeaders[i];
                var definition       = definitions[i];

                input.Position = definitionHeader.DataOffset;

                definition.LoadPosition = input.Position;

                if (validate == true)
                {
                    // Validating path: read through an exact-size slice so both
                    // under- and over-consumption surface as a FormatException.
                    using (var data = input.ReadToMemoryStream((int)definitionHeader.DataSize))
                    {
                        var slicedReader = new DefinitionReader(data, endian, definitions, names, tweakDBIds, resources);
                        definition.Deserialize(slicedReader);
                        if (data.Position != data.Length)
                        {
                            throw new FormatException();
                        }
                    }
                }
                else
                {
                    // Fast path: deserialize in place and only check the end position.
                    var expectedPosition = input.Position + definitionHeader.DataSize;
                    definition.Deserialize(reader);
                    if (input.Position != expectedPosition)
                    {
                        throw new FormatException();
                    }
                }
            }

            var instance = new CacheFile();

            instance.Unknown00 = header.Unknown00;
            instance.Unknown04 = header.Unknown04;
            instance.Unknown08 = header.Unknown08;
            instance.Unknown10 = header.Unknown10;
            instance.Definitions.AddRange(definitions.Skip(1));
            return(instance);
        }