public TranscriptCacheWriter(Stream stream, CacheHeader header, bool leaveOpen = false)
{
    _blockStream = new BlockStream(new Zstandard(), stream, CompressionMode.Compress);
    _writer      = new ExtendedBinaryWriter(_blockStream, Encoding.UTF8, leaveOpen);
    _header      = header;
    _leaveOpen   = leaveOpen;
}

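A minimal usage sketch (not from the source), mirroring how the writer is exercised in the GetCacheStream snippet further down; `cacheData` (a TranscriptCacheData) and `outputPrefix` are assumed inputs.

// Hedged usage sketch: write a transcript cache to its canonical path.
// `cacheData` and `outputPrefix` are assumptions for illustration.
using (var stream = FileUtilities.GetCreateStream(CacheConstants.TranscriptPath(outputPrefix)))
using (var writer = new TranscriptCacheWriter(stream, cacheData.Header))
{
    writer.Write(cacheData);
}
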
public void CacheHeader_EndToEnd()
{
    const Source         expectedTranscriptSource  = Source.BothRefSeqAndEnsembl;
    const long           expectedCreationTimeTicks = long.MaxValue;
    const GenomeAssembly expectedAssembly          = GenomeAssembly.hg19;
    const ushort         expectedVepVersion        = ushort.MaxValue;

    var expectedBaseHeader   = new Header("VEP", 1, 2, expectedTranscriptSource, expectedCreationTimeTicks, expectedAssembly);
    var expectedCustomHeader = new TranscriptCacheCustomHeader(expectedVepVersion, 0);
    var expectedHeader       = new CacheHeader(expectedBaseHeader, expectedCustomHeader);

    CacheHeader observedHeader;

    using (var ms = new MemoryStream())
    {
        using (var writer = new BinaryWriter(ms, Encoding.UTF8, true))
        {
            expectedHeader.Write(writer);
        }

        ms.Position    = 0;
        observedHeader = CacheHeader.Read(ms);
    }

    Assert.NotNull(observedHeader);
    Assert.Equal(expectedTranscriptSource,  observedHeader.Source);
    Assert.Equal(expectedCreationTimeTicks, observedHeader.CreationTimeTicks);
    Assert.Equal(expectedAssembly,          observedHeader.Assembly);
    Assert.Equal(expectedVepVersion,        observedHeader.Custom.VepVersion);
}

public PredictionCacheWriter(BlockStream blockStream, CacheHeader header, bool leaveOpen = false)
{
    _blockStream = blockStream;
    _writer      = new BinaryWriter(blockStream);
    _header      = header;
    _leaveOpen   = leaveOpen;
}

private static void WriteTranscripts(ILogger logger, CacheHeader header,
    IntervalArray<ITranscript>[] transcriptIntervalArrays,
    IntervalArray<IRegulatoryRegion>[] regulatoryRegionIntervalArrays)
{
    var staging = TranscriptCacheStaging.GetStaging(header, transcriptIntervalArrays, regulatoryRegionIntervalArrays);

    logger.Write("- writing transcripts... ");
    staging.Write(FileUtilities.GetCreateStream(CacheConstants.TranscriptPath(_outputPrefix)));
    logger.WriteLine("finished.");
}

internal static CacheHeader Read(BinaryReader reader)
{
    var newHeader = new CacheHeader();
    foreach (var setField in ReadValues) setField(newHeader, reader);
    return newHeader;
}

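The `ReadValues` table itself is not shown in the source. A hedged sketch of its likely shape, as an ordered list of per-field setters; the field names, writability, and reader calls below are assumptions for illustration (contrast this delegate-driven approach with the reflection-based Read variant later in this section):

// Assumed shape of ReadValues (illustrative only): one setter per
// serialized header field, applied in on-disk order.
private static readonly Action<CacheHeader, BinaryReader>[] ReadValues =
{
    (header, reader) => header.Identifier    = reader.ReadString(),
    (header, reader) => header.SchemaVersion = reader.ReadUInt16(),
    (header, reader) => header.DataVersion   = reader.ReadUInt16()
    // ... remaining fields in on-disk order
};
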
public static TranscriptCacheStaging GetStaging(CacheHeader header,
    IntervalArray<ITranscript>[] transcriptIntervalArrays,
    IntervalArray<IRegulatoryRegion>[] regulatoryRegionIntervalArrays)
{
    var uniqueData = GetUniqueData(transcriptIntervalArrays);
    var cacheData  = new TranscriptCacheData(header, uniqueData.Genes, uniqueData.TranscriptRegions,
        uniqueData.Mirnas, uniqueData.PeptideSeqs, transcriptIntervalArrays, regulatoryRegionIntervalArrays);
    return new TranscriptCacheStaging(cacheData);
}

public TranscriptCacheData(CacheHeader header, IGene[] genes, ITranscriptRegion[] transcriptRegions,
    IInterval[] mirnas, string[] peptideSeqs,
    IntervalArray<ITranscript>[] transcriptIntervalArrays,
    IntervalArray<IRegulatoryRegion>[] regulatoryRegionIntervalArrays)
{
    Header                         = header;
    Genes                          = genes;
    TranscriptRegions              = transcriptRegions;
    Mirnas                         = mirnas;
    PeptideSeqs                    = peptideSeqs;
    TranscriptIntervalArrays       = transcriptIntervalArrays;
    RegulatoryRegionIntervalArrays = regulatoryRegionIntervalArrays;
}

public CacheFile(string Filename, string Build) : base(Filename, Build)
{
    Reader.EndianType = EndianFormat.LittleEndian;
    Version           = DefinitionSet.Halo1PC;
    Header            = new CacheHeader(this);
    IndexHeader       = new CacheIndexHeader(this);
    IndexItems        = new IndexTable(this);
    Strings           = new StringTable(this);
    LocaleTables      = new List<LocaleTable>();
}

public CacheFile(string Filename, string Build) : base(Filename, Build)
{
    Reader.Format = EndianFormat.Little;
    Version       = DefinitionSet.Halo1PC;
    Header        = new CacheHeader(this);
    IndexHeader   = new CacheIndexHeader(this);
    IndexItems    = new IndexTable(this);
    Strings       = new StringTable(this);
    LocaleTables  = new List<LocaleTable>();
}

public TranscriptCacheReaderTests()
{
    var chr1 = new Chromosome("chr1", "1", 0);
    var chr2 = new Chromosome("chr2", "2", 1);
    var chr3 = new Chromosome("chr3", "3", 2);

    _refIndexToChromosome = new Dictionary<ushort, IChromosome>
    {
        [chr1.Index] = chr1,
        [chr2.Index] = chr2,
        [chr3.Index] = chr3
    };

    const GenomeAssembly genomeAssembly = GenomeAssembly.GRCh38;

    var baseHeader   = new Header("test", 2, 3, Source.BothRefSeqAndEnsembl, 4, genomeAssembly);
    var customHeader = new TranscriptCacheCustomHeader(1, 2);
    _expectedHeader  = new CacheHeader(baseHeader, customHeader);

    var transcriptRegions = new ITranscriptRegion[]
    {
        new TranscriptRegion(TranscriptRegionType.Exon,   1, 100, 199, 300, 399),
        new TranscriptRegion(TranscriptRegionType.Intron, 1, 200, 299, 399, 400),
        new TranscriptRegion(TranscriptRegionType.Exon,   2, 300, 399, 400, 499)
    };

    var mirnas = new IInterval[2];
    mirnas[0] = new Interval(100, 200);
    mirnas[1] = new Interval(300, 400);

    var peptideSeqs = new[] { "MASE*" };

    var genes = new IGene[1];
    genes[0] = new Gene(chr3, 100, 200, true, "TP53", 300, CompactId.Convert("7157"),
        CompactId.Convert("ENSG00000141510"));

    var regulatoryRegions = new IRegulatoryRegion[2];
    regulatoryRegions[0] = new RegulatoryRegion(chr3, 1200, 1300, CompactId.Convert("123"), RegulatoryRegionType.enhancer);
    regulatoryRegions[1] = new RegulatoryRegion(chr3, 1250, 1450, CompactId.Convert("456"), RegulatoryRegionType.enhancer);
    var regulatoryRegionIntervalArrays = regulatoryRegions.ToIntervalArrays(3);

    var transcripts              = GetTranscripts(chr3, genes, transcriptRegions, mirnas);
    var transcriptIntervalArrays = transcripts.ToIntervalArrays(3);

    _expectedCacheData = new TranscriptCacheData(_expectedHeader, genes, transcriptRegions, mirnas,
        peptideSeqs, transcriptIntervalArrays, regulatoryRegionIntervalArrays);
}

/// <summary>
/// Caches the response before it is sent to the client if it is a CacheableResponse or if the
/// NegotiationContext has the nancy-rapidcache header set.
/// </summary>
/// <param name="context"></param>
private static void SetCache(NancyContext context)
{
    if (context.Response is CachedResponse)
    {
        return;
    }

    if (context.Request.Query is DynamicDictionary dict)
    {
        if (DisableCache.Enabled && dict.ContainsKey(DisableCache.Key))
        {
            return;
        }
    }

    string key = _cacheKeyGenerator.Get(context.Request);
    if (string.IsNullOrEmpty(key))
    {
        return;
    }

    if (context.Response.StatusCode != HttpStatusCode.OK)
    {
        _cacheStore.Remove(key);
        return;
    }

    var currentCache = _cacheStore.Get(key);
    var now          = DateTime.UtcNow;

    if (context.Response is CacheableResponse cacheableResponse)
    {
        if (currentCache == null || currentCache.Expiration < now)
        {
            _cacheStore.Set(key, context, cacheableResponse.Expiration);
        }
    }
    else if (context.NegotiationContext.Headers.ContainsKey(CacheHeader.ToLowerInvariant()))
    {
        var expiration = DateTime.Parse(context.NegotiationContext.Headers[CacheHeader], CultureInfo.InvariantCulture);
        context.NegotiationContext.Headers.Remove(CacheHeader);

        if (currentCache == null || currentCache.Expiration < now)
        {
            _cacheStore.Set(key, context, expiration);
        }
    }
}

private static Stream GetCacheStream()
{
    const GenomeAssembly genomeAssembly = GenomeAssembly.GRCh38;

    var baseHeader     = new Header("test", 2, 3, Source.BothRefSeqAndEnsembl, 4, genomeAssembly);
    var customHeader   = new TranscriptCacheCustomHeader(1, 2);
    var expectedHeader = new CacheHeader(baseHeader, customHeader);

    var transcriptRegions = new ITranscriptRegion[]
    {
        new TranscriptRegion(TranscriptRegionType.Exon,   1, 100, 199, 300, 399),
        new TranscriptRegion(TranscriptRegionType.Intron, 1, 200, 299, 399, 400),
        new TranscriptRegion(TranscriptRegionType.Exon,   2, 300, 399, 400, 499)
    };

    var mirnas = new IInterval[2];
    mirnas[0] = new Interval(100, 200);
    mirnas[1] = new Interval(300, 400);

    var peptideSeqs = new[] { "MASE*" };

    var genes = new IGene[1];
    genes[0] = new Gene(ChromosomeUtilities.Chr3, 100, 200, true, "TP53", 300, CompactId.Convert("7157"),
        CompactId.Convert("ENSG00000141510"));

    var regulatoryRegions = new IRegulatoryRegion[2];
    regulatoryRegions[0] = new RegulatoryRegion(ChromosomeUtilities.Chr3, 1200, 1300, CompactId.Convert("123"),
        RegulatoryRegionType.enhancer);
    regulatoryRegions[1] = new RegulatoryRegion(ChromosomeUtilities.Chr3, 1250, 1450, CompactId.Convert("456"),
        RegulatoryRegionType.enhancer);
    var regulatoryRegionIntervalArrays = regulatoryRegions.ToIntervalArrays(3);

    var transcripts              = GetTranscripts(ChromosomeUtilities.Chr3, genes, transcriptRegions, mirnas);
    var transcriptIntervalArrays = transcripts.ToIntervalArrays(3);

    var expectedCacheData = new TranscriptCacheData(expectedHeader, genes, transcriptRegions, mirnas,
        peptideSeqs, transcriptIntervalArrays, regulatoryRegionIntervalArrays);

    var ms = new MemoryStream();
    using (var writer = new TranscriptCacheWriter(ms, expectedHeader, true))
    {
        writer.Write(expectedCacheData);
    }

    ms.Position = 0;
    return ms;
}

internal static CacheHeader Read(BinaryReader reader)
{
    var newHeader = new CacheHeader();

    // Reflection-based deserialization: populate each public field in declaration
    // order, so the field layout must match the on-disk order exactly.
    foreach (var field in typeof(CacheHeader).GetFields())
    {
        field.SetValue(newHeader, reader.Read(field.FieldType));
    }

    return newHeader;
}

private static void CheckHeaderVersion(CacheHeader header, GenomeAssembly refGenomeAssembly)
{
    if (header.GenomeAssembly != refGenomeAssembly)
    {
        throw new UserErrorException(GetGenomeAssemblyErrorMessage(header.GenomeAssembly, refGenomeAssembly));
    }

    if (header.SchemaVersion != CacheConstants.SchemaVersion)
    {
        throw new UserErrorException(
            $"Expected the cache schema version ({CacheConstants.SchemaVersion}) to be identical to the schema version in the cache header ({header.SchemaVersion})");
    }
}

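A hedged sketch of a possible call site, reusing the `CacheHeader.Read(stream)` pattern from the GetHeaderInformation snippet below; `cachePath` and the chosen assembly are assumptions.

// Hedged usage sketch (assumed call site):
CacheHeader header;
using (var stream = FileUtilities.GetReadStream(cachePath))
{
    header = CacheHeader.Read(stream);
}
CheckHeaderVersion(header, GenomeAssembly.GRCh38); // throws UserErrorException on mismatch
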
public void BlockStream_EndToEnd()
{
    string expectedString = GetRandomString(Block.DefaultSize + 10000);

    var customHeader = new DemoCustomHeader(new BlockStream.BlockPosition());
    var header = new CacheHeader(CacheConstants.Identifier, CacheConstants.SchemaVersion,
        CacheConstants.DataVersion, Source.Ensembl, NumTicks, ExpectedGenomeAssembly, customHeader);

    using (var ms = new MemoryStream())
    {
        WriteBlockStream(Qlz, header, customHeader, ms, expectedString);
        ms.Seek(0, SeekOrigin.Begin);
        ReadFromBlockStream(Qlz, ms, expectedString);
    }
}

public TranscriptCacheStaging CreateTranscriptCache(MutableTranscript[] mutableTranscripts,
    IEnumerable<IRegulatoryRegion> regulatoryRegions, IIntervalForest<UgaGene> geneForest, int numRefSeqs)
{
    Logger.Write("- assigning UGA genes to transcripts... ");
    AssignUgaGenesToTranscripts(mutableTranscripts, geneForest);
    Logger.WriteLine("finished.");

    var transcriptIntervalArrays       = mutableTranscripts.ToTranscripts().ToIntervalArrays(numRefSeqs);
    var regulatoryRegionIntervalArrays = regulatoryRegions.ToIntervalArrays(numRefSeqs);

    var customHeader = new TranscriptCacheCustomHeader(_vepVersion, _vepReleaseTicks);
    var header       = new CacheHeader(HeaderUtilities.GetHeader(_source, _genomeAssembly), customHeader);

    return TranscriptCacheStaging.GetStaging(header, transcriptIntervalArrays, regulatoryRegionIntervalArrays);
}

public virtual void Close()
{
    Reader.Close();
    Reader.Dispose();

    LocaleTables.Clear();
    Strings.Clear();
    IndexItems.Clear();

    play        = null;
    zone        = null;
    ugh_        = null;
    buildNode   = null;
    versionNode = null;
    vertexNode  = null;
    Header      = null;
    IndexHeader = null;
}

private static IEnumerable<IDataSourceVersion> GetDataSourceVersions(CacheHeader header)
{
    var dataSourceVersions = new List<IDataSourceVersion>();
    if (header == null) return dataSourceVersions;

    ushort vepVersion = header.Custom.VepVersion;
    var dataSourceVersion = new DataSourceVersion("VEP", vepVersion.ToString(), header.CreationTimeTicks,
        header.Source.ToString());

    dataSourceVersions.Add(dataSourceVersion);
    return dataSourceVersions;
}

private static (ushort Schema, ushort Data, ushort Vep) GetHeaderInformation(string cachePath)
{
    CacheHeader header;
    using (var stream = FileUtilities.GetReadStream(cachePath))
    {
        header = CacheHeader.Read(stream);
    }

    if (header == null)
    {
        throw new InvalidFileFormatException($"Could not parse the header information correctly for {cachePath}");
    }

    return (header.SchemaVersion, header.DataVersion, header.Custom.VepVersion);
}

private static IEnumerable<IDataSourceVersion> GetDataSourceVersions(CacheHeader header)
{
    var dataSourceVersions = new List<IDataSourceVersion>();
    if (header == null) return dataSourceVersions;

    var customHeader = header.CustomHeader as TranscriptCacheCustomHeader;
    var vepVersion   = customHeader?.VepVersion;

    var dataSourceVersion = new DataSourceVersion("VEP", vepVersion.ToString(), header.CreationTimeTicks,
        header.TranscriptSource.ToString());

    dataSourceVersions.Add(dataSourceVersion);
    return dataSourceVersions;
}

public CacheFile(string Filename, string Build) : base(Filename, Build)
{
    Version      = DefinitionSet.Halo3Retail;
    Header       = new CacheHeader(this);
    IndexHeader  = new Halo3Beta.CacheFile.CacheIndexHeader(this);
    IndexItems   = new Halo3Beta.CacheFile.IndexTable(this);
    Strings      = new StringTable(this);
    LocaleTables = new List<LocaleTable>();

    try
    {
        for (int i = 0; i < int.Parse(buildNode.Attributes["languageCount"].Value); i++)
        {
            LocaleTables.Add(new LocaleTable(this, (GameLanguage)i));
        }
    }
    catch
    {
        LocaleTables.Clear();
    }
}

private static IEnumerable<IDataSourceVersion> GetDataSourceVersions(CacheHeader header)
{
    var dataSourceVersions = new List<IDataSourceVersion>();
    if (header == null) return dataSourceVersions;

    ushort vepVersion = header.Custom.VepVersion;

    // TODO: Embed the data source version in the next cache file format. This hack lets us handle the SARS-CoV-2 genome
    DataSourceVersion dataSourceVersion = vepVersion == 0
        ? new DataSourceVersion("RefSeq", "NC_045512.2",
            new DateTime(2020, 3, 20, 0, 0, 0, DateTimeKind.Utc).Ticks,
            "Severe acute respiratory syndrome coronavirus 2 (SARS-CoV2)")
        : new DataSourceVersion("VEP", vepVersion.ToString(), header.CreationTimeTicks, header.Source.ToString());

    dataSourceVersions.Add(dataSourceVersion);
    return dataSourceVersions;
}

public CacheFile(string Filename, string Build) : base(Filename, Build)
{
    Version      = DefinitionSet.Halo3Beta;
    Header       = new CacheHeader(this);
    IndexHeader  = new CacheIndexHeader(this);
    IndexItems   = new IndexTable(this);
    Strings      = new StringTable(this);
    LocaleTables = new List<LocaleTable>();

    try
    {
        for (int i = 0; i < int.Parse(buildNode.Attributes["languageCount"].Value); i++)
        {
            LocaleTables.Add(new LocaleTable(this, (Language)i));
        }
    }
    catch
    {
        LocaleTables.Clear();
    }
}

public CacheItem(CacheHeader parent)
{
    Parent = parent;
}

private static TranscriptCacheStaging GetTranscriptStaging(CacheHeader header,
    IntervalArray<ITranscript>[] transcriptIntervalArrays,
    IntervalArray<IRegulatoryRegion>[] regulatoryRegionIntervalArrays) =>
    TranscriptCacheStaging.GetStaging(header, transcriptIntervalArrays, regulatoryRegionIntervalArrays);

public static CacheFile Load(Stream input, bool validate)
{
    var headerBytes = input.ReadBytes(FileHeader.Size + CacheHeader.Size);

    Endian endian;
    CacheHeader header;
    using (var data = new MemoryStream(headerBytes, false))
    {
        var fileHeader = FileHeader.Read(data);
        if (fileHeader.Version != 13 || fileHeader.Unknown != 0)
        {
            throw new FormatException();
        }
        endian = fileHeader.Endian;
        header = CacheHeader.Read(data, endian);
    }

    if (validate == true)
    {
        var deadbeefBytes = BitConverter.GetBytes(0xDEADBEEFu);
        Array.Copy(deadbeefBytes, 0, headerBytes, FileHeader.Size + CacheHeader.HashOffset, 4);
        if (CRC32.Compute(headerBytes, 0, headerBytes.Length) != header.HeaderHash)
        {
            throw new FormatException();
        }
    }

    var stringBytes = LoadArrayBytes(input, header.StringData, 1, validate);

    string[] names, tweakDBIds, resources;
    using (var data = new MemoryStream(stringBytes, false))
    {
        names      = LoadStrings(input, header.NameStringOffsets,      validate, data, endian);
        tweakDBIds = LoadStrings(input, header.TweakDBIdStringOffsets, validate, data, endian);
        resources  = LoadStrings(input, header.ResourceStringOffsets,  validate, data, endian);
    }

    var definitionHeaders = LoadArray(input, header.Definitions, DefinitionHeader.Size, validate,
        DefinitionHeader.Read, endian);

    var definitions = new Definition[definitionHeaders.Length];
    for (int i = 1; i < definitionHeaders.Length; i++)
    {
        definitions[i] = DefinitionFactory.Create(definitionHeaders[i].Type);
    }

    for (int i = 1; i < definitionHeaders.Length; i++)
    {
        var definitionHeader = definitionHeaders[i];
        var definition       = definitions[i];
        definition.Parent    = definitions[definitionHeader.ParentIndex];
        definition.Name      = names[definitionHeader.NameIndex];
    }

    var reader = new DefinitionReader(input, endian, definitions, names, tweakDBIds, resources);
    for (int i = 1; i < definitionHeaders.Length; i++)
    {
        var definitionHeader = definitionHeaders[i];
        var definition       = definitions[i];

        input.Position          = definitionHeader.DataOffset;
        definition.LoadPosition = input.Position;

        if (validate == true)
        {
            using (var data = input.ReadToMemoryStream((int)definitionHeader.DataSize))
            {
                var slicedReader = new DefinitionReader(data, endian, definitions, names, tweakDBIds, resources);
                definition.Deserialize(slicedReader);
                if (data.Position != data.Length)
                {
                    throw new FormatException();
                }
            }
        }
        else
        {
            var expectedPosition = input.Position + definitionHeader.DataSize;
            definition.Deserialize(reader);
            if (input.Position != expectedPosition)
            {
                throw new FormatException();
            }
        }
    }

    var instance = new CacheFile();
    instance.Unknown00 = header.Unknown00;
    instance.Unknown04 = header.Unknown04;
    instance.Unknown08 = header.Unknown08;
    instance.Unknown10 = header.Unknown10;
    instance.Definitions.AddRange(definitions.Skip(1));
    return instance;
}

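A hedged usage sketch; `cachePath` is an assumption, and passing `validate: true` enables the CRC32 header check and the per-definition size checks shown above.

// Hedged usage sketch (assumed call site):
using (var input = File.OpenRead(cachePath))
{
    var cacheFile = CacheFile.Load(input, validate: true);
    // cacheFile.Definitions now holds every deserialized definition
    // (index 0 is skipped above as the null/root entry).
}
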
private static CacheHeader GetHeader(CacheHeader header) =>
    new CacheHeader(CacheConstants.Identifier, header.SchemaVersion, header.DataVersion,
        Source.BothRefSeqAndEnsembl, DateTime.Now.Ticks, header.GenomeAssembly, header.CustomHeader);

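This helper appears to restamp an existing header for a combined cache: the schema/data versions and assembly are inherited, while the source is forced to BothRefSeqAndEnsembl and the creation time is reset. A hedged sketch of a possible call site; `refSeqCache` is an assumption.

// Hedged sketch (assumed call site) when merging RefSeq and Ensembl caches:
var mergedHeader = GetHeader(refSeqCache.Header);
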
public LocaleTable(CacheBase Cache, Language Lang)
{
    cache = Cache;
    EndianReader Reader = cache.Reader;
    CacheHeader CH = cache.Header;

    #region Get Info
    int matgOffset = -1;

    foreach (IndexItem item in cache.IndexItems)
    {
        if (item.ClassCode == "matg")
        {
            matgOffset = item.Offset;
            break;
        }
    }

    if (matgOffset == -1) return;

    int localeStart = int.Parse(cache.buildNode.Attributes["localesStart"].Value);
    Reader.SeekTo(matgOffset + localeStart + (int)Lang * int.Parse(cache.buildNode.Attributes["languageSize"].Value));

    int localeCount = Reader.ReadInt32();
    int tableSize   = Reader.ReadInt32();
    int indexOffset = Reader.ReadInt32() + CH.localeModifier;
    int tableOffset = Reader.ReadInt32() + CH.localeModifier;
    #endregion

    #region Read Indices
    Reader.SeekTo(indexOffset);
    int[] indices = new int[localeCount];
    for (int i = 0; i < localeCount; i++)
    {
        this.Add("");
        Reader.ReadInt32();
        indices[i] = Reader.ReadInt32();
    }
    #endregion

    #region Read Names
    Reader.SeekTo(tableOffset);

    EndianReader newReader = string.IsNullOrEmpty(cache.localesKey)
        ? new EndianReader(new MemoryStream(Reader.ReadBytes(tableSize)), EndianFormat.BigEndian)
        : AES.DecryptSegment(Reader, tableOffset, tableSize, cache.localesKey);

    for (int i = 0; i < indices.Length; i++)
    {
        if (indices[i] == -1)
        {
            this[i] = "<null>";
            continue;
        }

        newReader.SeekTo(indices[i]);

        int length;
        if (i == indices.Length - 1)
        {
            length = tableSize - indices[i];
        }
        else
        {
            length = (indices[i + 1] != -1) ? indices[i + 1] - indices[i] : indices[i + 2] - indices[i];
        }

        if (length == 1)
        {
            this[i] = "<blank>";
            continue;
        }

        this[i] = newReader.ReadString(length);
    }

    newReader.Close();
    newReader.Dispose();
    #endregion
}

internal PredictionCacheStaging(CacheHeader header, Prediction.Entry[] lut, Prediction[][] predictionsPerRef)
{
    _header            = header;
    _lookupTable       = lut;
    _predictionsPerRef = predictionsPerRef;
}

private static CacheHeader CloneHeader(CacheHeader header) => new CacheHeader(CloneBaseHeader(header), header.Custom);
internal static bool Fetch(
    string path,
    out int refScale,
    out Vector2I size,
    out TextureFormat format,
    out Vector2B wrapped,
    out Vector2I padding,
    out Vector2I blockPadding,
    out byte[] data
)
{
    refScale     = 0;
    size         = Vector2I.Zero;
    format       = TextureFormat.Color;
    wrapped      = Vector2B.False;
    padding      = Vector2I.Zero;
    blockPadding = Vector2I.Zero;
    data         = null;

    if (Config.Cache.Enabled && File.Exists(path))
    {
        int retries = Config.Cache.LockRetries;

        while (retries-- > 0)
        {
            if (SavingMap.TryGetValue(path, out var state) && state != SaveState.Saved)
            {
                Thread.Sleep(Config.Cache.LockSleepMS);
                continue;
            }

            // https://stackoverflow.com/questions/1304/how-to-check-for-file-lock
            bool WasLocked(in IOException ex)
            {
                var errorCode = Marshal.GetHRForException(ex) & ((1 << 16) - 1);
                return errorCode == 32 || errorCode == 33;
            }

            try
            {
                using (var reader = new BinaryReader(new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read)))
                {
                    var header = CacheHeader.Read(reader);
                    header.Validate(path);

                    refScale     = header.RefScale;
                    size         = header.Size;
                    format       = header.Format.Value;
                    wrapped      = header.Wrapped;
                    padding      = header.Padding;
                    blockPadding = header.BlockPadding;

                    var dataLength = header.DataLength;
                    var dataHash   = header.DataHash;

                    var remainingSize = reader.BaseStream.Length - reader.BaseStream.Position;
                    if (remainingSize < header.DataLength)
                    {
                        throw new EndOfStreamException("Cache File is corrupted");
                    }

                    // Replaces the original's non-standard `foreach (int i in 0..data.Length)`
                    // (a project-local range extension) with a plain for loop.
                    data = new byte[dataLength];
                    for (int i = 0; i < data.Length; ++i)
                    {
                        data[i] = reader.ReadByte();
                    }

                    if (data.HashXX() != dataHash)
                    {
                        throw new IOException("Cache File is corrupted");
                    }
                }
                return true;
            }
            catch (Exception ex)
            {
                switch (ex)
                {
                    case FileNotFoundException _:
                    case EndOfStreamException _:
                    case IOException iox when !WasLocked(iox):
                    default:
                        ex.PrintWarning();
                        try { File.Delete(path); } catch { }
                        return false;
                    case IOException iox when WasLocked(iox):
                        Debug.TraceLn($"File was locked when trying to load cache file '{path}': {ex.Message} [{retries} retries]");
                        Thread.Sleep(Config.Cache.LockSleepMS);
                        break;
                }
            }
        }
    }

    return false;
}

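A hedged usage sketch of `Fetch`; `cachePath` and what the caller does with the out parameters are assumptions.

// Hedged usage sketch (assumed call site):
if (Fetch(cachePath, out int refScale, out var size, out var format,
          out var wrapped, out var padding, out var blockPadding, out byte[] data))
{
    // data now holds the hash-validated texture bytes;
    // size and format describe how to interpret them.
}
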
public CacheStream(string filename)
    : base(filename, FileMode.Open, FileAccess.Read, FileShare.Read, 8 * 1024)
{
    // HEADER
    var binaryReader = new BinaryReader(this, Encoding.UTF8);
    Header = CacheHeader.DeserializeFrom(this);

    base.Seek(Header.PathsInfo.PathTableAddress, SeekOrigin.Begin);
    var paths = Encoding.UTF8.GetString(binaryReader.ReadBytes(Header.PathsInfo.PathTableLength - 1))
        .Split(Char.MinValue);
    //Halo2.Paths.Assign(paths);

    // STRINGS
    base.Seek(Header.StringsInfo.StringTableAddress, SeekOrigin.Begin);
    Strings = Encoding.UTF8.GetString(binaryReader.ReadBytes(Header.StringsInfo.StringTableLength - 1))
        .Split(Char.MinValue);
    Halo2.Strings.Assign(new List<string>(Strings));

    // INDEX
    base.Seek(Header.IndexInfo.IndexOffset, SeekOrigin.Begin);
    Index = new TagIndex(this, paths);

    // Calculate file-pointer magic
    var secondaryMagic = Index[Index.GlobalsIdent].VirtualAddress -
                         (Header.IndexInfo.IndexOffset + Header.IndexInfo.IndexLength);

    DefaultMemoryBlock = new VirtualMappedAddress
    {
        Address = Index[0].VirtualAddress,
        Length  = Header.IndexInfo.MetaAllocationLength,
        Magic   = secondaryMagic
    };

    /* Intent: read the sbsp and lightmap address and lengths from the scenario tag
     * and store them in the Tags array. */
    base.Seek(Index[Index.ScenarioIdent].VirtualAddress - secondaryMagic + 528, SeekOrigin.Begin);
    var count   = binaryReader.ReadInt32();
    var address = binaryReader.ReadInt32();

    StructureMemoryBlockBindings = new Dictionary<TagIdent, int>(count * 2);
    StructureMemoryBlocks        = new List<VirtualMappedAddress>(count);

    for (var i = 0; i < count; ++i)
    {
        base.Seek(address - secondaryMagic + i * 68, SeekOrigin.Begin);
        var structureBlockOffset  = binaryReader.ReadInt32();
        var structureBlockLength  = binaryReader.ReadInt32();
        var structureBlockAddress = binaryReader.ReadInt32();

        base.Seek(8, SeekOrigin.Current);
        var sbspIdentifier = binaryReader.ReadTagIdent();
        base.Seek(4, SeekOrigin.Current);
        var ltmpIdentifier = binaryReader.ReadTagIdent();

        base.Seek(structureBlockOffset, SeekOrigin.Begin);
        var blockLength        = binaryReader.ReadInt32();
        var sbspVirtualAddress = binaryReader.ReadInt32();
        var ltmpVirtualAddress = binaryReader.ReadInt32();
        var sbsp               = binaryReader.ReadTagClass(); // read but unused: advances the stream past the tag class

        var hasLightmapData = !TagIdent.IsNull(ltmpIdentifier);
        var sbspLength      = hasLightmapData ? ltmpVirtualAddress - sbspVirtualAddress : blockLength;
        var ltmpLength      = blockLength - sbspLength;

        var block = new VirtualMappedAddress
        {
            Address = structureBlockAddress,
            Length  = structureBlockLength,
            Magic   = structureBlockAddress - structureBlockOffset
        };

        var sbspDatum = Index[sbspIdentifier];
        sbspDatum.VirtualAddress = sbspVirtualAddress;
        sbspDatum.Length         = sbspLength;
        Index.Update(sbspIdentifier, sbspDatum);

        StructureMemoryBlocks.Add(block);
        var index = StructureMemoryBlocks.Count - 1;
        StructureMemoryBlockBindings[sbspIdentifier] = index;

        if (hasLightmapData)
        {
            var ltmpDatum = Index[ltmpIdentifier];
            ltmpDatum.VirtualAddress = ltmpVirtualAddress;
            ltmpDatum.Length         = ltmpLength;
            Index.Update(ltmpIdentifier, ltmpDatum);
            StructureMemoryBlockBindings[ltmpIdentifier] = index;
        }

        ActiveAllocation(StructureCache.VirtualStructureCache0);
    }

    _deserializedTagCache = new Dictionary<TagIdent, GuerillaBlock>(Index.Count);
    _tagHashDictionary    = new Dictionary<TagIdent, string>(Index.Count);
    Initialize();
}