public void the_items_are_sorted() {
    var last = new IndexEntry(ulong.MaxValue, long.MaxValue);
    foreach (var item in _newtable.IterateAllInOrder()) {
        Assert.IsTrue(last.Key > item.Key || (last.Key == item.Key && last.Position > item.Position));
        last = item;
    }
}
private string GetId(IndexEntry e) { if (e == null) { return null; } return e.Id.ToString(); }
public void the_items_are_sorted() {
    var last = new IndexEntry(ulong.MaxValue, 0, long.MaxValue);
    foreach (var item in _newtable.IterateAllInOrder()) {
        Assert.IsTrue((last.Stream == item.Stream ? last.Version > item.Version : last.Stream > item.Stream)
                      || ((last.Stream == item.Stream && last.Version == item.Version) && last.Position > item.Position));
        last = item;
    }
}
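// The two sorting tests above assert a strict descending (Stream, Version, Position) iteration
// order, starting the comparison from the largest possible entry. Below is a minimal sketch of
// a CompareTo consistent with that ordering and with the GetMaxOf helpers further down; the
// field names come from the surrounding examples, but the struct itself is hypothetical, not
// the production IndexEntry:
public readonly struct IndexEntrySketch : IComparable<IndexEntrySketch> {
    public readonly ulong Stream;  // stream hash
    public readonly int Version;   // event number within the stream
    public readonly long Position; // log position, the final tie-breaker
    public IndexEntrySketch(ulong stream, int version, long position) {
        Stream = stream; Version = version; Position = position;
    }
    public int CompareTo(IndexEntrySketch other) {
        if (Stream != other.Stream) return Stream.CompareTo(other.Stream);
        if (Version != other.Version) return Version.CompareTo(other.Version);
        return Position.CompareTo(other.Position);
    }
}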
public void WriteIndexEntry(IContext context, ByteArrayBuffer writer, object obj) { var indexEntry = obj as IndexEntry; if (indexEntry == null) { indexEntry = new IndexEntry(ClassMetadataIdFor(context, obj), Convert.ToInt64(obj)); } writer.WriteInt(indexEntry.ClassMetadataId); writer.WriteLong(indexEntry.EnumValue); }
public void CanCallAddAfterUsingNameConstructor() { // ReSharper disable UseObjectOrCollectionInitializer var entry = new IndexEntry("index-entry"); entry.Add("qak", "qoo"); // ReSharper restore UseObjectOrCollectionInitializer Assert.AreEqual(1, entry.KeyValues.Count()); Assert.AreEqual("qak", entry.KeyValues.ElementAt(0).Key); Assert.AreEqual("qoo", entry.KeyValues.ElementAt(0).Value); }
public void AddAfterAssigningCustomListShouldThrowException() { var entry = new IndexEntry { KeyValues = new[] { new KeyValuePair<string, object>("foo", 123) } }; Assert.Throws<InvalidOperationException>(() => entry.Add("qak", "qoo")); }
public void CanInitializeWithCollectionIntializer() { var entry = new IndexEntry("index-entry") { { "foo", 123 }, { "bar", "baz" } }; Assert.AreEqual("index-entry", entry.Name); Assert.AreEqual(2, entry.KeyValues.Count()); Assert.AreEqual("foo", entry.KeyValues.ElementAt(0).Key); Assert.AreEqual(123, entry.KeyValues.ElementAt(0).Value); Assert.AreEqual("bar", entry.KeyValues.ElementAt(1).Key); Assert.AreEqual("baz", entry.KeyValues.ElementAt(1).Value); }
public void CanCallAddAfterUsingCollectionIntializer() { // ReSharper disable UseObjectOrCollectionInitializer var entry = new IndexEntry("index-entry") { { "foo", 123 }, { "bar", "baz" } }; // ReSharper restore UseObjectOrCollectionInitializer entry.Add("qak", "qoo"); Assert.AreEqual(3, entry.KeyValues.Count()); Assert.AreEqual("qak", entry.KeyValues.ElementAt(2).Key); Assert.AreEqual("qoo", entry.KeyValues.ElementAt(2).Value); }
public IndexEntry GetNext() {
    IndexEntry entry = null;
    //make sure there are still lines to read
    if (position <= _fileLines.Count() - 1) {
        string[] entryAsStrings = _fileLines[position].Split(";".ToCharArray())
            .Select(str => Regex.Replace(str, @"[""\\]", string.Empty).Trim()).ToArray();
        entry = new IndexEntry(int.Parse(entryAsStrings[0]), entryAsStrings[1], entryAsStrings[3], entryAsStrings[4], entryAsStrings[5]);
    }
    position++;
    return entry;
}
private void LoadEntries() {
    _entries = new List<IndexEntry>();
    HashSet<ulong> recordNumbers = new HashSet<ulong>(); // to check for dupes
    ulong offset = _entriesStart;
    IndexEntry entry;
    do {
        entry = new IndexEntry(_stream, offset, _folder);
        if (!recordNumbers.Contains(entry.RecordNum)) { // check for dupes
            _entries.Add(entry);
            if (!entry.DummyEntry) {
                recordNumbers.Add(entry.RecordNum);
            }
        }
        offset += entry.EntryLength;
    } while (!entry.LastEntry && offset < _entriesEnd);
}
public void CanInitializeWithLongForm() { var entry = new IndexEntry { Name = "index-entry", KeyValues = new[] { new KeyValuePair<string, object>("foo", 123), new KeyValuePair<string, object>("bar", "baz") } }; Assert.AreEqual("index-entry", entry.Name); Assert.AreEqual(2, entry.KeyValues.Count()); Assert.AreEqual("foo", entry.KeyValues.ElementAt(0).Key); Assert.AreEqual(123, entry.KeyValues.ElementAt(0).Value); Assert.AreEqual("bar", entry.KeyValues.ElementAt(1).Key); Assert.AreEqual("baz", entry.KeyValues.ElementAt(1).Value); }
public void bytes_is_made_of_key_and_position() { unsafe { var entry = new IndexEntry {Stream = 0x0101, Version = 0x1234, Position = 0xFFFF}; Assert.AreEqual(0x34, entry.Bytes[0]); Assert.AreEqual(0x12, entry.Bytes[1]); Assert.AreEqual(0x00, entry.Bytes[2]); Assert.AreEqual(0x00, entry.Bytes[3]); Assert.AreEqual(0x01, entry.Bytes[4]); Assert.AreEqual(0x01, entry.Bytes[5]); Assert.AreEqual(0x00, entry.Bytes[6]); Assert.AreEqual(0x00, entry.Bytes[7]); Assert.AreEqual(0xFF, entry.Bytes[8]); Assert.AreEqual(0xFF, entry.Bytes[9]); Assert.AreEqual(0x00, entry.Bytes[10]); Assert.AreEqual(0x00, entry.Bytes[11]); Assert.AreEqual(0x00, entry.Bytes[12]); Assert.AreEqual(0x00, entry.Bytes[13]); Assert.AreEqual(0x00, entry.Bytes[14]); Assert.AreEqual(0x00, entry.Bytes[15]); } }
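// The assertions above pin down a 16-byte little-endian layout: bytes 0-3 hold Version
// (0x1234 -> 34 12 00 00), bytes 4-7 hold Stream (0x0101 -> 01 01 00 00), and bytes 8-15
// hold Position (0xFFFF -> FF FF 00 00 00 00 00 00). A hedged sketch of an equivalent
// explicit-layout struct (requires System.Runtime.InteropServices; the real IndexEntry may
// well be declared differently):
[StructLayout(LayoutKind.Explicit, Size = 16)]
public struct IndexEntryLayoutSketch {
    [FieldOffset(0)] public int Version;
    [FieldOffset(4)] public uint Stream;
    [FieldOffset(8)] public long Position;
}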
public bool TryGetLatestEntry(uint stream, out IndexEntry entry) { throw new System.NotImplementedException(); }
public void ReIndexShouldFailUnderTransaction() { var nodeReference = new NodeReference(1); var indexEntries = new IndexEntry[] {new IndexEntry("node")}; ExecuteRestMethodUnderTransaction(client => client.ReIndex(nodeReference, indexEntries)); }
public FastGame getGame(IndexEntry ie) { FastGame ret = new FastGame(ScidPINVOKE.scidBaseT_getGame__SWIG_0(swigCPtr, IndexEntry.getCPtr(ie)), true); return ret; }
public static MemoryStream LoadBLTEEntry(IndexEntry idxEntry, BinaryReader readStream = null, bool downloaded = false) { lock (readLock) { if (readStream == null) { return(null); } if (!downloaded) { readStream.BaseStream.Position = idxEntry.Offset + 30; } if (readStream.ReadUInt32() != 0x45544C42) { Trace.TraceError($"data.{idxEntry.Index:000}: Invalid BLTE signature at 0x{readStream.BaseStream.Position:X8}."); return(null); } var blte = new BLTEEntry(); var frameHeaderLength = readStream.ReadBEInt32(); var chunks = 0u; var size = 0L; if (frameHeaderLength == 0) { chunks = 1; size = idxEntry.Size - 38; } else { readStream.BaseStream.Position += 1; chunks = readStream.ReadUInt24(); } blte.Chunks = new BLTEChunk[chunks]; for (var i = 0; i < chunks; i++) { if (frameHeaderLength == 0) { blte.Chunks[i].CompressedSize = size; blte.Chunks[i].UncompressedSize = size - 1; } else { blte.Chunks[i].CompressedSize = readStream.ReadBEInt32(); blte.Chunks[i].UncompressedSize = readStream.ReadBEInt32(); // Skip MD5 hash readStream.BaseStream.Position += 16; } } var data = new MemoryStream(); for (int i = 0; i < chunks; i++) { var dataBytes = readStream.ReadBytes((int)blte.Chunks[i].CompressedSize); HandleDataBlock(dataBytes, i, data); } data.Position = 0; return(data); } }
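// Note on the magic check in LoadBLTEEntry above: 0x45544C42 is the ASCII string "BLTE"
// ('B' = 0x42, 'L' = 0x4C, 'T' = 0x54, 'E' = 0x45) read as a little-endian UInt32, so the
// ReadUInt32 comparison simply verifies that the block starts with a "BLTE" signature.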
private IEnumerable<IndexEntry> GetRangeInternal(ulong hash, int startVersion, int endVersion, int? limit = null) {
    if (startVersion < 0) throw new ArgumentOutOfRangeException("startVersion");
    if (endVersion < 0) throw new ArgumentOutOfRangeException("endVersion");
    var candidates = new List<IEnumerator<IndexEntry>>();
    var awaiting = _awaitingMemTables;
    for (int index = 0; index < awaiting.Count; index++) {
        var range = awaiting[index].Table.GetRange(hash, startVersion, endVersion, limit).GetEnumerator();
        if (range.MoveNext()) candidates.Add(range);
    }
    var map = _indexMap;
    foreach (var table in map.InOrder()) {
        var range = table.GetRange(hash, startVersion, endVersion, limit).GetEnumerator();
        if (range.MoveNext()) candidates.Add(range);
    }
    var last = new IndexEntry(0, 0, 0);
    var first = true;
    while (candidates.Count > 0) {
        var maxIdx = GetMaxOf(candidates);
        var winner = candidates[maxIdx];
        var best = winner.Current;
        // yield unless this entry is an exact duplicate of the previous winner
        // (the same entry can arrive from several tables at once)
        if (first || last.Stream != best.Stream || last.Version != best.Version || last.Position != best.Position) {
            last = best;
            yield return best;
            first = false;
        }
        if (!winner.MoveNext()) candidates.RemoveAt(maxIdx);
    }
}
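// GetRangeInternal above is a k-way merge: it collects one enumerator per memtable and
// PTable that has entries for the hash, repeatedly yields the maximum across all enumerators
// (via GetMaxOf below) and advances the winner, so duplicates arriving from several tables
// show up adjacently and can be skipped by comparing against the previously yielded entry.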
private void loadChildrenIndexRoot() {
    NTFSFileStream stream = _indexRoot;
    _rootEntries = new List<IndexEntry>();
    //Index Root
    UInt32 attrTypes = Util.GetUInt32(stream, 0x0);
    UInt32 indexBufferSize = Util.GetUInt32(stream, 0x8);
    Byte clustersPerIndexBuffer = Util.GetByte(stream, 0xC);
    UInt32 size = Util.GetUInt32(stream, 0x14);
    UInt32 size2 = Util.GetUInt32(stream, 0x18);
    UInt32 flags = Util.GetUInt32(stream, 0x1C);
    ulong offset = 0x20;
    IndexEntry entry;
    do {
        entry = new IndexEntry(stream, offset, this);
        _rootEntries.Add(entry);
        offset += entry.EntryLength;
    } while (!entry.LastEntry);
}
public void Process (IData data, string index_name) { int end = data.Count; Debug.Assert (!flat_bmp || pages.Count == 0); Debug.Assert ((end & (page_size - 1)) == 0); IList entries = new ArrayList (); for (int page_base = 0; page_base < end; page_base += page_size) { ushort [] page_data = new ushort [page_size]; for (int i = 0; i < page_size; i++) { ushort v = data [page_base + i]; page_data[i] = v; } bool indexed = IsIndexed (page_base); Page page = GetPageForData (page_base, page_data, indexed); if (indexed) { IndexEntry index_entry = new IndexEntry (index_name, page_base, page_base + page_size, page); page.AddIndexEntry (index_entry); entries.Add (index_entry); } } indices.Add (new Index (index_name, entries)); }
public int Read(long byte_pos, byte[] buffer, int offset, int _count) {
    long remain = _count;
    int completed = 0;
    //we take advantage of the fact that we pretty much always read one sector at a time.
    //this would be really inefficient if we only read one byte at a time.
    //on the other hand, just in case, we could keep a cache of the most recently decoded sector. that would be easy and would solve that problem (if we had it)
    while (remain > 0) {
        int listIndex = FindInIndex(byte_pos, Read_LastIndex);
        IndexEntry ie = Index[listIndex];
        Read_LastIndex = listIndex;
        if (ie.Type == 0) {
            //type 0 is special: it's just a raw blob. so all we need to do is read straight out of the stream
            long blockOffset = byte_pos - ie.LogicalOffset;
            long bytesRemainInBlock = ie.Number - blockOffset;
            long todo = remain;
            if (bytesRemainInBlock < todo) {
                todo = bytesRemainInBlock;
            }
            stream.Position = ie.ECMOffset + blockOffset;
            while (todo > 0) {
                int toRead;
                if (todo > int.MaxValue) {
                    toRead = int.MaxValue;
                } else {
                    toRead = (int)todo;
                }
                int done = stream.Read(buffer, offset, toRead);
                if (done != toRead) {
                    return completed;
                }
                completed += done;
                remain -= done;
                todo -= done;
                offset += done;
                byte_pos += done;
            }
            //done reading the raw block; go back to check for another block
            continue;
        } //if(type 0)
        else {
            //these are sector-based types. they have similar handling.
            long blockOffset = byte_pos - ie.LogicalOffset;
            //figure out which sector within the block we're in
            int outSecSize;
            int inSecSize;
            int outSecOffset;
            if (ie.Type == 1) { outSecSize = 2352; inSecSize = 2048; outSecOffset = 0; }
            else if (ie.Type == 2) { outSecSize = 2336; inSecSize = 2052; outSecOffset = 16; }
            else if (ie.Type == 3) { outSecSize = 2336; inSecSize = 2328; outSecOffset = 16; }
            else { throw new InvalidOperationException(); }
            long secNumberInBlock = blockOffset / outSecSize;
            long secOffsetInEcm = secNumberInBlock * outSecSize;
            long bytesAskedIntoSector = blockOffset % outSecSize;
            long bytesRemainInSector = outSecSize - bytesAskedIntoSector;
            long todo = remain;
            if (bytesRemainInSector < todo) {
                todo = bytesRemainInSector;
            }
            //move stream to beginning of this sector in ecm
            stream.Position = ie.ECMOffset + inSecSize * secNumberInBlock;
            //read and decode the sector
            switch (ie.Type) {
                case 1:
                    //TODO - read first 3 bytes
                    if (stream.Read(Read_SectorBuf, 16, 2048) != 2048) {
                        return completed;
                    }
                    Reconstruct(Read_SectorBuf, 1);
                    break;
                case 2:
                    if (stream.Read(Read_SectorBuf, 20, 2052) != 2052) {
                        return completed;
                    }
                    Reconstruct(Read_SectorBuf, 2);
                    break;
                case 3:
                    if (stream.Read(Read_SectorBuf, 20, 2328) != 2328) {
                        return completed;
                    }
                    Reconstruct(Read_SectorBuf, 3);
                    break;
            }
            //sector is decoded to 2352 bytes. Handling doesn't depend much on type from here
            Array.Copy(Read_SectorBuf, (int)bytesAskedIntoSector + outSecOffset, buffer, offset, todo);
            int done = (int)todo;
            offset += done;
            completed += done;
            remain -= done;
            byte_pos += done;
        } //not type 0
    } // while(remain)
    return completed;
}
public void Load(string path) {
    stream = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read);
    //skip header
    stream.Seek(4, SeekOrigin.Current);
    long logOffset = 0;
    for (;;) {
        //read block count. this format is really stupid. maybe it's good for detecting non-ecm files or something.
        int b = stream.ReadByte();
        if (b == -1) {
            MisformedException();
        }
        int bytes = 1;
        int T = b & 3;
        long N = (b >> 2) & 0x1F;
        int nbits = 5;
        while (b.Bit(7)) {
            if (bytes == 5) {
                MisformedException(); //if we're gonna need a 6th byte, this file is broken
            }
            b = stream.ReadByte();
            bytes++;
            if (b == -1) {
                MisformedException();
            }
            N |= (long)(b & 0x7F) << nbits;
            nbits += 7;
        }
        //end of blocks section
        if (N == 0xFFFFFFFF) {
            break;
        }
        //the 0x80000000 business is confusing, but this is almost certainly an error
        if (N >= 0x100000000) {
            MisformedException();
        }
        uint todo = (uint)N + 1;
        IndexEntry ie = new IndexEntry { Number = todo, ECMOffset = stream.Position, LogicalOffset = logOffset, Type = T };
        Index.Add(ie);
        if (T == 0) {
            stream.Seek(todo, SeekOrigin.Current);
            logOffset += todo;
        } else if (T == 1) {
            stream.Seek(todo * (2048 + 3), SeekOrigin.Current);
            logOffset += todo * 2352;
        } else if (T == 2) {
            stream.Seek(todo * 2052, SeekOrigin.Current);
            logOffset += todo * 2336;
        } else if (T == 3) {
            stream.Seek(todo * 2328, SeekOrigin.Current);
            logOffset += todo * 2336;
        } else {
            MisformedException();
        }
    }
    //TODO - endian bug. need an endian-independent binary reader with good license (miscutils is apache license)
    //extension methods on binary reader won't suffice, we need something that lets you control the endianness used for reading. a complete replacement.
    var br = new BinaryReader(stream);
    EDC = br.ReadInt32();
    Length = logOffset;
}
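// A worked sketch of the per-block header that Load decodes above, derived from that code
// rather than from a separate ECM specification: bit 7 of every byte is a continuation flag,
// the first byte carries the 2-bit block type plus the low 5 bits of the count, and each
// following byte contributes 7 more bits; a count of 0xFFFFFFFF terminates the index, and
// the real block count is N + 1. The helper name is hypothetical:
static (int type, long n) ReadEcmBlockHeader(Stream s) {
    int b = s.ReadByte();
    if (b == -1) throw new InvalidDataException("truncated ECM block header");
    int type = b & 3;           // 0 = raw blob, 1..3 = sector-based types
    long n = (b >> 2) & 0x1F;   // low 5 bits of the count
    int shift = 5, bytes = 1;
    while ((b & 0x80) != 0) {   // continuation bit
        if (bytes == 5) throw new InvalidDataException("over-long count");
        b = s.ReadByte();
        bytes++;
        if (b == -1) throw new InvalidDataException("truncated ECM block header");
        n |= (long)(b & 0x7F) << shift;
        shift += 7;
    }
    return (type, n);           // caller treats n == 0xFFFFFFFF as end-of-index
}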
public TagProperty(TagId id, IndexEntry entry) { //Set this.id = id; this.entry = entry; }
/// <summary> /// Read a stream as an AaruFormat /// </summary> /// <param name="stream">AaruFormat file as a stream</param> /// <returns>Populated AaruFormat file, null on failure</returns> public static AaruFormat Deserialize(Stream stream) { try { AaruFormat aif = new AaruFormat(); using (BinaryReader br = new BinaryReader(stream, Encoding.Default, true)) { aif.Identifier = br.ReadUInt64(); aif.Application = Encoding.Unicode.GetString(br.ReadBytes(64), 0, 64); aif.ImageMajorVersion = br.ReadByte(); aif.ImageMinorVersion = br.ReadByte(); aif.ApplicationMajorVersion = br.ReadByte(); aif.ApplicationMinorVersion = br.ReadByte(); aif.MediaType = (AaruMediaType)br.ReadUInt32(); aif.IndexOffset = br.ReadUInt64(); aif.CreationTime = br.ReadInt64(); aif.LastWrittenTime = br.ReadInt64(); // If the offset is bigger than the stream, we can't read it if (aif.IndexOffset > (ulong)stream.Length) { return(null); } // Otherwise, we read in the index header stream.Seek((long)aif.IndexOffset, SeekOrigin.Begin); aif.IndexHeader = IndexHeader.Deserialize(stream); if (aif.IndexHeader.entries == 0) { return(null); } // Get the list of entries aif.IndexEntries = new IndexEntry[aif.IndexHeader.entries]; for (ushort index = 0; index < aif.IndexHeader.entries; index++) { aif.IndexEntries[index] = IndexEntry.Deserialize(stream); switch (aif.IndexEntries[index].blockType) { // We don't do anything with these block types currently case AaruBlockType.DataBlock: case AaruBlockType.DeDuplicationTable: case AaruBlockType.Index: case AaruBlockType.Index2: case AaruBlockType.GeometryBlock: case AaruBlockType.MetadataBlock: case AaruBlockType.TracksBlock: case AaruBlockType.CicmBlock: case AaruBlockType.DataPositionMeasurementBlock: case AaruBlockType.SnapshotBlock: case AaruBlockType.ParentBlock: case AaruBlockType.DumpHardwareBlock: case AaruBlockType.TapeFileBlock: case AaruBlockType.TapePartitionBlock: case AaruBlockType.CompactDiscIndexesBlock: // No-op break; // Read in all available hashes case AaruBlockType.ChecksumBlock: // If the offset is bigger than the stream, we can't read it if (aif.IndexEntries[index].offset > (ulong)stream.Length) { return(null); } // Otherwise, we read in the block stream.Seek((long)aif.IndexEntries[index].offset, SeekOrigin.Begin); ChecksumHeader checksumHeader = ChecksumHeader.Deserialize(stream); if (checksumHeader.entries == 0) { return(null); } // Read through each and pick out the ones we care about for (byte entry = 0; entry < checksumHeader.entries; entry++) { ChecksumEntry checksumEntry = ChecksumEntry.Deserialize(stream); if (checksumEntry == null) { continue; } switch (checksumEntry.type) { case AaruChecksumAlgorithm.Invalid: break; case AaruChecksumAlgorithm.Md5: aif.MD5 = checksumEntry.checksum; break; case AaruChecksumAlgorithm.Sha1: aif.SHA1 = checksumEntry.checksum; break; case AaruChecksumAlgorithm.Sha256: aif.SHA256 = checksumEntry.checksum; break; case AaruChecksumAlgorithm.SpamSum: aif.SpamSum = checksumEntry.checksum; break; } } // Once we got hashes, we return early return(aif); } } } return(aif); } catch { // We don't care what the error was at this point return(null); } }
public bool TryGetOldestEntry(string streamId, out IndexEntry entry) { entry = InvalidIndexEntry; return(false); }
/// <summary> /// Prepare the rule for first use. /// </summary> protected override void Init() { base.Init(); //Loading check config from an XML file //TODO: Implement XML Schema validation when loading XDocument xDoc; try { xDoc = XDocument.Load(Environment.ExpandEnvironmentVariables(pathToCheckConfig)); } catch (Exception e) { throw new Microsoft.Localization.OSLEBot.Exceptions.InitializingRuleException("Failed to load config file for check.", e); } var defaultNS = (xDoc.Root.GetDefaultNamespace()).NamespaceName; var brandingNames = xDoc.Root.Descendants(XName.Get("BrandingName", defaultNS)).ToArray(); brandingNameIndex = new IndexEntry[brandingNames.Length]; for (int i = 0; i < brandingNames.Length; i++) { var bNameNode = brandingNames[i]; var englishName = bNameNode.Attribute(XName.Get("Value")).Value; var englishRegexAttribute = bNameNode.Attribute(XName.Get("RegularExpression")); string englishNameRegexPattern = null; if (englishRegexAttribute == null || String.IsNullOrEmpty(englishRegexAttribute.Value)) { // create regex based on the englishName provided by user englishNameRegexPattern = GetRegexPattern(englishName); } else { //user provided a regex pattern, so use it as is englishNameRegexPattern = englishRegexAttribute.Value; } var enBrandingEntry = new BrandingEntry { BrandingName = englishName, Regex = new Regex(englishNameRegexPattern, RegexOptions.Compiled | RegexOptions.ExplicitCapture | RegexOptions.Singleline) }; // do the same for all language translations Dictionary <CultureInfo, BrandingEntry> translationMapping = null; var localizedBrandingNames = bNameNode.Descendants(XName.Get("LocalizedBrandingName", defaultNS)).ToArray(); if (localizedBrandingNames.Length > 0) { translationMapping = new Dictionary <CultureInfo, BrandingEntry>(localizedBrandingNames.Length); foreach (var lbNameNode in localizedBrandingNames) { var locName = lbNameNode.Attribute(XName.Get("Translation")).Value; var locRegexAttribute = lbNameNode.Attribute(XName.Get("RegularExpression")); string locNameRegexPattern = null; if (locRegexAttribute == null || String.IsNullOrEmpty(locRegexAttribute.Value)) { // create regex based on the localized name provided by user locNameRegexPattern = GetRegexPattern(locName); } else { locNameRegexPattern = locRegexAttribute.Value; } var locBrandingEntry = new BrandingEntry { BrandingName = locName, Regex = new Regex(locNameRegexPattern, RegexOptions.Compiled | RegexOptions.ExplicitCapture | RegexOptions.Singleline) }; var targetCulture = new CultureInfo(lbNameNode.Attribute(XName.Get("Culture")).Value); translationMapping.Add(targetCulture, locBrandingEntry); } } brandingNameIndex[i] = new IndexEntry(enBrandingEntry, translationMapping); } }
protected override void OnBackground(object state) {
    ReportStatus($"Loading {mapPath}...");
    map = new HaloMap(mapPath);
    ReportStatus($"Decompiling {map.Name}...");
    Group globalsGroup;
    Group soundGestaltGroup = null;
    IndexEntry soundGestalt = null;
    IndexEntry globals = map.Globals;
    try {
        using (var tagReader = globals.Data.GetVirtualStream().CreateReader()) {
            tagReader.BaseStream.Seek(globals.Address, SeekOrigin.Begin);
            globalsGroup = TagLookup.CreateTagGroup(globals.Root);
            globalsGroup.Read(tagReader);
            BlockField soundGlobalsTagBlock = (BlockField)globalsGroup.TagBlocks[0].Fields[4];
            if (soundGlobalsTagBlock.BlockList.Count > 0) {
                TagId soundGestaltId = (TagId)soundGlobalsTagBlock.BlockList[0].Fields[4].Value;
                soundGestalt = map.IndexEntries[soundGestaltId];
                soundGlobalsTagBlock.BlockList[0].Fields[4].Value = (int)TagId.Null;
            }
        }
        if (soundGestalt != null) {
            using (BinaryReader reader = soundGestalt.Data.GetVirtualStream().CreateReader()) {
                soundGestaltGroup = TagLookup.CreateTagGroup(soundGestalt.Root);
                reader.BaseStream.Seek(soundGestalt.Address, SeekOrigin.Begin);
                soundGestaltGroup.Read(reader);
            }
        }
        int num = 0;
        SetProgressVisibility(true);
        ResetProgress(map.IndexEntries.Count);
        var result = Parallel.ForEach(map.IndexEntries, entry => {
            int current = Interlocked.Increment(ref num); // this lambda runs on multiple threads, so increment atomically
            Group guerillaTagGroup;
            var tagGroup = TagLookup.CreateTagGroup(entry.Root);
            var reader = entry.Data.GetVirtualStream().CreateReader();
            reader.BaseStream.Seek(entry.Address, SeekOrigin.Begin);
            try {
                tagGroup.Read(reader);
            } finally {
                guerillaTagGroup = Convert.ToGuerilla(tagGroup, soundGestaltGroup, entry, map);
            }
            string localFileName = Path.Combine($"{entry.Filename}.{guerillaTagGroup.Name}");
            string tagGroupFileName = Path.Combine(tagsDirectory, localFileName);
            if (!Directory.Exists(Path.GetDirectoryName(tagGroupFileName))) {
                Directory.CreateDirectory(Path.GetDirectoryName(tagGroupFileName));
            }
            TagGroupHeader header = new TagGroupHeader();
            using (FileStream fs = new FileStream(tagGroupFileName, FileMode.Create, FileAccess.ReadWrite, FileShare.Read))
            using (BinaryReader fileReader = new BinaryReader(fs))
            using (BinaryWriter fileWriter = new BinaryWriter(fs)) {
                fs.Seek(TagGroupHeader.Size, SeekOrigin.Begin);
                guerillaTagGroup.Write(fileWriter);
                switch (guerillaTagGroup.Tag) {
                    case "snd!": SoundTagGroup_CreateRaws(guerillaTagGroup, soundGestalt, fileWriter, ref header); break;
                    case "mode": RenderModelTagGroup_CreateRaws(guerillaTagGroup, entry, fileWriter, ref header); break;
                    case "sbsp": ScenarioStructureBspTagGroup_CreateRaws(guerillaTagGroup, entry, fileWriter, ref header); break;
                    case "ltmp": ScenarioStructureLightmapTagGroup_CreateRaws(guerillaTagGroup, entry, fileWriter, ref header); break;
                    case "weat": WeatherSystemTagGroup_CreateRaws(guerillaTagGroup, entry, fileWriter, ref header); break;
                    case "DECR": DecoratorSetTagGroup_CreateRaws(guerillaTagGroup, entry, fileWriter, ref header); break;
                    case "PRTM": ParticleModelTagGroup_CreateRaws(guerillaTagGroup, entry, fileWriter, ref header); break;
                    case "jmad": AnimationTagGroup_CreateRaws(guerillaTagGroup, entry, fileWriter, ref header); break;
                    case "bitm": BitmapTagGroup_CreateRaws(guerillaTagGroup, entry, fileWriter, ref header); break;
                }
                header.Checksum = (uint)TagGroup_CalculateChecksum(guerillaTagGroup);
                header.GroupTag = guerillaTagGroup.Tag.FourCc;
                header.Id = entry.Id.Dword;
                header.AbideTag = "atag";
                fs.Seek(0, SeekOrigin.Begin);
                fileWriter.Write(header);
                ReportProgress(current);
            }
        });
    } catch {
#if DEBUG
        throw;
#endif
    } finally {
        GC.Collect();
    }
    ReportProgress(map.IndexEntries.Count);
    Thread.Sleep(2000);
    SetProgressVisibility(false);
}
/// <summary> /// Parses the read addon stream into the instance properties. /// </summary> /// <exception cref="ReaderException">Parsing errors.</exception> private void Parse() { if (Buffer.Length == 0) { throw new ReaderException("Attempted to read from empty buffer."); } Buffer.Seek(0, SeekOrigin.Begin); BinaryReader reader = new BinaryReader(Buffer); // Ident if (String.Join(String.Empty, reader.ReadChars(Addon.Ident.Length)) != Addon.Ident) { throw new ReaderException("Header mismatch."); } FormatVersion = reader.ReadChar(); if (FormatVersion > Addon.Version) { throw new ReaderException("Can't parse version " + Convert.ToString(FormatVersion) + " addons."); } /*SteamID = */ reader.ReadUInt64(); // SteamID (long) Timestamp = new DateTime(1970, 1, 1, 0, 0, 0).ToLocalTime(). AddSeconds((double)reader.ReadInt64()); // Timestamp (long) // Required content (not used at the moment, just read out) if (FormatVersion > 1) { string content = reader.ReadNullTerminatedString(); while (content != String.Empty) { content = reader.ReadNullTerminatedString(); } } Name = reader.ReadNullTerminatedString(); Description = reader.ReadNullTerminatedString(); reader.ReadNullTerminatedString(); // This would be the author... currently not implemented /*Version = */ reader.ReadInt32(); // Addon version (unused) // File index int FileNumber = 1; int Offset = 0; while (reader.ReadInt32() != 0) { IndexEntry entry = new IndexEntry(); entry.Path = reader.ReadNullTerminatedString(); entry.Size = reader.ReadInt64(); // long long entry.CRC = reader.ReadUInt32(); // unsigned long entry.Offset = Offset; entry.FileNumber = (uint)FileNumber; Index.Add(entry); Offset += (int)entry.Size; FileNumber++; } Fileblock = (ulong)reader.BaseStream.Position; // Try to parse the description string type = String.Empty; Description = Json.ParseDescription(Description, ref type, ref Tags); Type = type; // Circumvent "A property, indexer or dynamic member access may not be passed as an out or ref parameter" }
private bool TryGetOneEntry(uint stream, int startNumber, int endNumber, out IndexEntry entry) { Ensure.Nonnegative(startNumber, "startNumber"); Ensure.Nonnegative(endNumber, "endNumber"); entry = TableIndex.InvalidIndexEntry; var startKey = BuildKey(stream, startNumber); var endKey = BuildKey(stream, endNumber); if (_midpoints != null && (startKey > _midpoints[0].Key || endKey < _midpoints[_midpoints.Length - 1].Key)) return false; var workItem = GetWorkItem(); try { var recordRange = LocateRecordRange(endKey); int low = recordRange.Item1; int high = recordRange.Item2; while (low < high) { var mid = low + (high - low) / 2; IndexEntry midpoint = ReadEntry(mid, workItem); if (midpoint.Key <= endKey) high = mid; else low = mid + 1; } var candEntry = ReadEntry(high, workItem); Debug.Assert(candEntry.Key <= endKey); if (candEntry.Key < startKey) return false; entry = candEntry; return true; } finally { ReturnWorkItem(workItem); } }
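// TryGetOneEntry above binary-searches a PTable whose entries are sorted in descending key
// order, converging on the first (largest-keyed) entry whose key is <= endKey. A generic
// sketch of the same search over a plain descending array (assumed shape, not the PTable API):
static int FirstIndexAtOrBelow(ulong[] descendingKeys, ulong endKey) {
    int low = 0, high = descendingKeys.Length - 1;
    while (low < high) {
        int mid = low + (high - low) / 2;
        if (descendingKeys[mid] <= endKey) high = mid; // answer is at mid or earlier
        else low = mid + 1;                            // keys before mid are all > endKey
    }
    return high; // caller must still check the key at this index against the lower bound
}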
public bool TryGetOldestEntry(string streamId, out IndexEntry entry) { throw new NotImplementedException(); }
public bool TryGetOldestEntry(uint stream, out IndexEntry entry) { return TryGetSmallestEntry(stream, 0, int.MaxValue, out entry); }
private bool UpdateFileInformationForAllEntries(MemoryMappedViewAccessor indexView, Index otherIndex, bool shouldAlsoTryPopulateFromDisk) { long updatedEntriesFromOtherIndex = 0; long updatedEntriesFromDisk = 0; using (MemoryMappedFile mmf = otherIndex.GetMemoryMappedFile()) using (MemoryMappedViewAccessor otherIndexView = mmf.CreateViewAccessor()) { Parallel.ForEach( this.indexEntryOffsets, entry => { string currentIndexFilename = entry.Key; long currentIndexOffset = entry.Value; if (!IndexEntry.HasInitializedCTimeEntry(indexView, currentIndexOffset)) { long otherIndexOffset; if (otherIndex.indexEntryOffsets.TryGetValue(currentIndexFilename, out otherIndexOffset)) { if (IndexEntry.HasInitializedCTimeEntry(otherIndexView, otherIndexOffset)) { IndexEntry currentIndexEntry = new IndexEntry(indexView, currentIndexOffset); IndexEntry otherIndexEntry = new IndexEntry(otherIndexView, otherIndexOffset); currentIndexEntry.CtimeSeconds = otherIndexEntry.CtimeSeconds; currentIndexEntry.CtimeNanosecondFraction = otherIndexEntry.CtimeNanosecondFraction; currentIndexEntry.MtimeSeconds = otherIndexEntry.MtimeSeconds; currentIndexEntry.MtimeNanosecondFraction = otherIndexEntry.MtimeNanosecondFraction; currentIndexEntry.Dev = otherIndexEntry.Dev; currentIndexEntry.Ino = otherIndexEntry.Ino; currentIndexEntry.Uid = otherIndexEntry.Uid; currentIndexEntry.Gid = otherIndexEntry.Gid; currentIndexEntry.Size = otherIndexEntry.Size; Interlocked.Increment(ref updatedEntriesFromOtherIndex); } } else if (shouldAlsoTryPopulateFromDisk) { string localPath = FromGitRelativePathToDotnetFullPath(currentIndexFilename, this.repoRoot); if (NativeMethods.TryStatFileAndUpdateIndex(this.tracer, localPath, indexView, entry.Value)) { Interlocked.Increment(ref updatedEntriesFromDisk); } } } }); } this.tracer.RelatedEvent( EventLevel.Informational, "UpdateIndexFileInformation", new EventMetadata() { { "UpdatedFromOtherIndex", updatedEntriesFromOtherIndex }, { "UpdatedFromDisk", updatedEntriesFromDisk } }, Keywords.Telemetry); return((updatedEntriesFromOtherIndex > 0) || (updatedEntriesFromDisk > 0)); }
public ConcurrentDictionary <ulong, MemoryStream> ReadFiles(byte[] signature, Locales locales = Locales.enUS) { var files = new ConcurrentDictionary <ulong, MemoryStream>(); foreach (var entry in rootFile.Entries) { var rootEntries = rootFile[entry.Key]; for (var i = 0; i < rootEntries.Length; i++) { if ((rootEntries[i].Locales & locales) == locales) { var encodingEntry = encodingFile[rootEntries[i].MD5]; if (encodingEntry.Size != 0 && encodingEntry.Keys.Length > 0) { for (var j = 0; j < 0x10; j++) { IndexEntry idxEntry = default(IndexEntry); foreach (var k in encodingEntry.Keys) { if ((idxEntry = idxFiles[j][k.Slice(0, 9)]).Size != 0) { var dataFile = dataFiles[idxEntry.Index]; if (dataFile == null) { throw new InvalidOperationException("Invalid data file."); } var sigBuffer = new byte[signature.Length]; var stream = DataFile.LoadBLTEEntry(idxEntry, dataFile.readStream); stream?.Read(sigBuffer, 0, sigBuffer.Length); if (sigBuffer.Compare(signature)) { files.TryAdd(entry.Key, stream); } } } if (idxEntry.Size != 0) { break; } } for (var j = 0x10; j < idxFiles.Count; j++) { IndexEntry idxEntry = default(IndexEntry); foreach (var k in encodingEntry.Keys) { if ((idxEntry = idxFiles[j][k]).Size != 0) { var sigBuffer = new byte[signature.Length]; var stream = DataFile.LoadBLTEEntry(idxEntry, cdnConfig.DownloadFile(indexFiles[idxEntry.Index], idxEntry)); stream?.Read(sigBuffer, 0, sigBuffer.Length); if (sigBuffer.Compare(signature)) { files.TryAdd(entry.Key, stream); } } } if (idxEntry.Size != 0) { break; } } } } } } return(files); }
public object IndexEntryToObject(IContext context, object indexEntry) { IndexEntry entry = (IndexEntry)indexEntry; return(ToEnum(context, entry.ClassMetadataId, entry.EnumValue)); }
private static int GetMaxOf(List<IEnumerator<IndexEntry>> enumerators) { var max = new IndexEntry(ulong.MinValue, 0, long.MinValue); int idx = 0; for (int i = 0; i < enumerators.Count; i++) { var cur = enumerators[i].Current; if (cur.CompareTo(max) > 0) { max = cur; idx = i; } } return idx; }
public IndexEntryVertex(IndexEntry entry) : base(entry.Id.Sha) { State = entry.State; Path = entry.Path; }
public bool TryGetOldestEntry(uint stream, out IndexEntry entry) { entry = InvalidIndexEntry; return false; }
public BinaryReader DownloadFile(string archive, IndexEntry indexEntry) { var url = $"http://{DownloadUrl}/data/{archive.GetHexAt(0)}/{archive.GetHexAt(2)}/{archive}.index"; throw new NotImplementedException("CDNConfig.DownloadFile not implemented."); }
public ushort getGame(IndexEntry ie, ByteBuffer bb) { ushort ret = ScidPINVOKE.scidBaseT_getGame__SWIG_1(swigCPtr, IndexEntry.getCPtr(ie), ByteBuffer.getCPtr(bb)); return ret; }
public MemoryStream ReadFile(RootEntry[] rootEntries, Locales locales = Locales.enUS) { for (var i = 0; i < rootEntries.Length; i++) { if ((rootEntries[i].Locales & locales) == locales) { var encodingEntry = encodingFile[rootEntries[i].MD5]; if (encodingEntry.Size != 0 && encodingEntry.Keys.Length > 0) { for (var j = 0; j < 0x10; j++) { IndexEntry idxEntry = default(IndexEntry); foreach (var k in encodingEntry.Keys) { if ((idxEntry = idxFiles[j][k.Slice(0, 9)]).Size != 0) { var dataFile = dataFiles[idxEntry.Index]; if (dataFile == null) { throw new InvalidOperationException("Invalid data file."); } var ret = DataFile.LoadBLTEEntry(idxEntry, dataFile.readStream); if (ret == null) { break; } return(ret); } } if (idxEntry.Size != 0) { break; } } // CDN indices for (var j = 0x10; j < idxFiles.Count; j++) { IndexEntry idxEntry = default(IndexEntry); foreach (var k in encodingEntry.Keys) { if ((idxEntry = idxFiles[j][k]).Size != 0) { return(DataFile.LoadBLTEEntry(idxEntry, cdnConfig.DownloadFile(indexFiles[idxEntry.Index], idxEntry))); } } if (idxEntry.Size != 0) { break; } } } } } return(null); }
static void BuildIndex(string inputFolder) {
    var files = Directory.GetFiles(inputFolder, "*.*");
    foreach (string fullpath in files) {
        IndexEntry entry = new IndexEntry();
        string filename = Path.GetFileName(fullpath);
        entry.fullpath = fullpath;
        entry.filename = filename;
        entry.offset = 0;
        entry.filesize = 0;
        entries.Add(entry);
    }
}
public IndexRoot(byte[] rawBytes) : base(rawBytes) {
    var index = (int)ContentOffset;
    IndexedAttributeType = (AttributeType)BitConverter.ToInt32(rawBytes, index);
    index += 4;
    CollationType = (CollationTypes)BitConverter.ToInt32(rawBytes, index);
    index += 4;
    EntrySize = BitConverter.ToInt32(rawBytes, index);
    index += 4;
    NumberClusterBlocks = BitConverter.ToInt32(rawBytes, index);
    index += 4;
    OffsetToFirstIndexEntry = BitConverter.ToInt32(rawBytes, index);
    index += 4;
    TotalSizeOfIndexEntries = BitConverter.ToInt32(rawBytes, index);
    index += 4;
    AllocatedSizeOfEntries = BitConverter.ToInt32(rawBytes, index);
    index += 4;
    Flags = (IndexFlag)rawBytes[index];
    index += 1;
    index += 3; //padding
    //TODO verify this
    var mftInfoBytes = new byte[8];
    Buffer.BlockCopy(rawBytes, index, mftInfoBytes, 0, 8);
    index += 8;
    MftRecord = new MftEntryInfo(mftInfoBytes);
    //end verify
    IndexEntries = new List<IndexEntry>();
    while (index < rawBytes.Length) {
        var indexValSize = BitConverter.ToInt16(rawBytes, index);
        if (indexValSize == 0x10) {
            //indicates no more index entries
            break;
        }
        if (indexValSize > rawBytes.Length - index) {
            indexValSize = (short)(rawBytes.Length - index);
        }
        var buff = new byte[indexValSize];
        Buffer.BlockCopy(rawBytes, index, buff, 0, indexValSize);
        var ie = new IndexEntry(buff);
        IndexEntries.Add(ie);
        index += indexValSize;
    }
}
private static int GetMaxOf(List<IEnumerator<IndexEntry>> enumerators) { //TODO GFY IF WE LIMIT THIS TO FOUR WE CAN UNROLL THIS LOOP AND WILL BE FASTER var max = new IndexEntry(ulong.MinValue, long.MinValue); int idx = 0; for (int i = 0; i < enumerators.Count; i++) { var cur = enumerators[i].Current; if (cur.CompareTo(max) > 0) { max = cur; idx = i; } } return idx; }
private void SoundTagGroup_CreateResources(ITagGroup tagGroup, IndexEntry entry, BinaryWriter writer, ref TagGroupHeader header) { List <long> addresses = new List <long>(); List <byte[]> buffers = new List <byte[]>(); foreach (ITagBlock pitchRange in ((BlockField)tagGroup[0][13]).BlockList) { foreach (ITagBlock permutation in ((BlockField)pitchRange[7]).BlockList) { foreach (ITagBlock chunk in ((BlockField)permutation[6]).BlockList) { int address = (int)chunk[0].Value; if (entry.Resources.TryGetResource(address, out var resource)) { header.TagResourceCount++; addresses.Add(address); buffers.Add(resource.GetBuffer()); } else if ((address & 0xC0000000) == 0) { System.Diagnostics.Debugger.Break(); } } } } foreach (ITagBlock extraInfo in ((BlockField)tagGroup[0][15]).BlockList) { ITagBlock geometry = (ITagBlock)extraInfo[2].Value; int address = (int)geometry[1].Value; if (entry.Resources.TryGetResource(address, out var resource)) { header.TagResourceCount++; addresses.Add(address); buffers.Add(resource.GetBuffer()); } } if (header.TagResourceCount > 0) { header.RawOffsetsOffset = (uint)writer.BaseStream.Position; for (int i = 0; i < header.TagResourceCount; i++) { writer.Write((int)addresses[i]); } header.RawLengthsOffset = (uint)writer.BaseStream.Position; for (int i = 0; i < header.TagResourceCount; i++) { writer.Write(buffers[i].Length); } header.RawDataOffset = (uint)writer.BaseStream.Position; for (int i = 0; i < header.TagResourceCount; i++) { writer.Write(buffers[i]); } } }
private static EventResult GetEventRecord(ITransactionFileReader reader, IndexEntry indexEntry) { var res = ReadPrepareInternal(reader, indexEntry.Position); if (!res.Success) return new EventResult(false, null); var eventRecord = new EventRecord(indexEntry.Version, res.Record); return new EventResult(true, eventRecord); }
/// <summary>
///
/// </summary>
protected override void ProcessRecord() {
    WriteObject(IndexEntry.Get(path).RecordNumber);
}
public void AddIndexEntry (IndexEntry index_entry) { index_entries.Add (index_entry); }
public bool TryGetLatestEntry(uint stream, out IndexEntry entry) { return TryGetLargestEntry(stream, 0, int.MaxValue, out entry); }
/// <summary> /// The AcquireSession method is invoked to request exclusive /// access to a session's information in the database. /// </summary> /// <param name="sessionId"> /// The session's unique identifier. /// </param> /// <returns> /// The persisted information that is maintained for /// the specified session identifier. /// </returns> public VfxFixDatabaseRecord AcquireSession(string sessionId) { VfxFixDatabaseRecord result = new VfxFixDatabaseRecord(); // REC: Determine if the session's file path exists: string sessionPath = Path.Combine(_rootPath, sessionId); if (!Directory.Exists(sessionPath)) { Directory.CreateDirectory(sessionPath); } // REC: Attempt to open and load the database entry // for the specified session: string sessionFile = Path.Combine(sessionPath, "Session.xml"); if (!File.Exists(sessionFile)) { ResetSession(sessionId); } // REC: Attempt to read the session's details from // the persisted information in the session file: FileStream fsSession = new FileStream(sessionFile, FileMode.Open, FileAccess.Read, FileShare.None); XmlSerializer xsSession = new XmlSerializer(typeof(XmlFixDatabaseRecord)); XmlFixDatabaseRecord sessionRecord = xsSession.Deserialize(fsSession) as XmlFixDatabaseRecord; result.TxSequence = sessionRecord.TxSequence; result.RxSequence = sessionRecord.RxSequence; fsSession.Close(); // REC: Create the session's lock file: string lockFile = Path.Combine(sessionPath, "Locked.txt"); FileStream fs = new FileStream(lockFile, FileMode.Create, FileAccess.ReadWrite, FileShare.None); fs.Close(); // REC: Create a new instance of the SessionDetails class // that will be used to maintain the session's information: SessionDetails sessionDetails = new SessionDetails(); // REC: Attempt to open the index file for the session: string idxFile = Path.Combine(sessionPath, "Index.xml"); FileStream idxStream = new FileStream(idxFile, FileMode.OpenOrCreate, FileAccess.ReadWrite, FileShare.None); // REC: Maintain a reference to the index file stream // in the session's details record: sessionDetails.IdxWriter = new StreamWriter(idxStream); // REC: Read all of the index entries from the index file // and store them in the session's details structure: StreamReader idxReader = new StreamReader(idxStream); while (idxReader.EndOfStream == false) { string idxLine = idxReader.ReadLine(); string[] idxTokens = idxLine.Split(new char[] { ':' }); IndexEntry idxEntry = new IndexEntry(); idxEntry.msgOffset = int.Parse(idxTokens[1]); idxEntry.msgLength = int.Parse(idxTokens[2]); idxEntry.msgSequence = int.Parse(idxTokens[0]); sessionDetails.Index.Add(idxEntry); } // REC: Attempt to open the message file for the session: string msgFile = Path.Combine(sessionPath, "Messages.txt"); FileStream msgStream = new FileStream(msgFile, FileMode.OpenOrCreate, FileAccess.ReadWrite, FileShare.None); // REC: Maintain a reference to the message file stream // in the session's details record: sessionDetails.MsgWriter = new StreamWriter(msgStream); // REC: Add the session details record to the internal // map, keyed by the session identifier: _mapSessions.Add(sessionId, sessionDetails); // REC: Return the session's specifics to the caller: return(result); }
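// The parsing in AcquireSession above implies one colon-separated index line per message,
// in "sequence:offset:length" order; e.g. a line such as "42:1024:183" (hypothetical values)
// would read back as msgSequence = 42, msgOffset = 1024, msgLength = 183. The corresponding
// writer side is not shown in this excerpt.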
private bool TryGetSmallestEntry(uint stream, int startNumber, int endNumber, out IndexEntry entry) { Ensure.Nonnegative(startNumber, "startNumber"); Ensure.Nonnegative(endNumber, "endNumber"); entry = TableIndex.InvalidIndexEntry; var startKey = BuildKey(stream, startNumber); var endKey = BuildKey(stream, endNumber); if (startKey > _maxEntry || endKey < _minEntry) return false; var workItem = GetWorkItem(); try { var recordRange = LocateRecordRange(startKey); int low = recordRange.Lower; int high = recordRange.Upper; while (low < high) { var mid = low + (high - low + 1) / 2; IndexEntry midpoint = ReadEntry(mid, workItem); if (midpoint.Key < startKey) high = mid - 1; else low = mid; } var candEntry = ReadEntry(high, workItem); if (candEntry.Key < startKey) throw new Exception(string.Format("candEntry.Key {0} < startKey {1}, stream {2}, startNum {3}, endNum {4}, PTable: {5}.", candEntry.Key, startKey, stream, startNumber, endNumber, Filename)); if (candEntry.Key > endKey) return false; entry = candEntry; return true; } finally { ReturnWorkItem(workItem); } }
public bool? VerifyMediaImage() {
    // This will traverse all blocks and check their CRC64 without uncompressing them
    DicConsole.DebugWriteLine("DiscImageChef format plugin", "Checking index integrity at {0}", header.indexOffset);
    imageStream.Position = (long)header.indexOffset;
    structureBytes = new byte[Marshal.SizeOf<IndexHeader>()];
    imageStream.Read(structureBytes, 0, structureBytes.Length);
    IndexHeader idxHeader = Marshal.SpanToStructureLittleEndian<IndexHeader>(structureBytes);
    if (idxHeader.identifier != BlockType.Index) {
        DicConsole.DebugWriteLine("DiscImageChef format plugin", "Incorrect index identifier");
        return false;
    }
    DicConsole.DebugWriteLine("DiscImageChef format plugin", "Index at {0} contains {1} entries", header.indexOffset, idxHeader.entries);
    structureBytes = new byte[Marshal.SizeOf<IndexEntry>() * idxHeader.entries];
    imageStream.Read(structureBytes, 0, structureBytes.Length);
    Crc64Context.Data(structureBytes, out byte[] verifyCrc);
    if (BitConverter.ToUInt64(verifyCrc, 0) != idxHeader.crc64) {
        DicConsole.DebugWriteLine("DiscImageChef format plugin", "Expected index CRC {0:X16} but got {1:X16}", idxHeader.crc64, BitConverter.ToUInt64(verifyCrc, 0));
        return false;
    }
    imageStream.Position -= structureBytes.Length;
    List<IndexEntry> vrIndex = new List<IndexEntry>();
    for (ushort i = 0; i < idxHeader.entries; i++) {
        structureBytes = new byte[Marshal.SizeOf<IndexEntry>()];
        imageStream.Read(structureBytes, 0, structureBytes.Length);
        IndexEntry entry = Marshal.SpanToStructureLittleEndian<IndexEntry>(structureBytes);
        DicConsole.DebugWriteLine("DiscImageChef format plugin", "Block type {0} with data type {1} is indexed to be at {2}", entry.blockType, entry.dataType, entry.offset);
        vrIndex.Add(entry);
    }
    // Read up to 1MiB at a time for verification
    const int VERIFY_SIZE = 1024 * 1024;
    foreach (IndexEntry entry in vrIndex) {
        imageStream.Position = (long)entry.offset;
        Crc64Context crcVerify;
        ulong readBytes;
        byte[] verifyBytes;
        switch (entry.blockType) {
            case BlockType.DataBlock:
                structureBytes = new byte[Marshal.SizeOf<BlockHeader>()];
                imageStream.Read(structureBytes, 0, structureBytes.Length);
                BlockHeader blockHeader = Marshal.SpanToStructureLittleEndian<BlockHeader>(structureBytes);
                crcVerify = new Crc64Context();
                readBytes = 0;
                DicConsole.DebugWriteLine("DiscImageChef format plugin", "Verifying data block type {0} at position {1}", entry.dataType, entry.offset);
                while (readBytes + VERIFY_SIZE < blockHeader.cmpLength) {
                    verifyBytes = new byte[VERIFY_SIZE];
                    imageStream.Read(verifyBytes, 0, verifyBytes.Length);
                    crcVerify.Update(verifyBytes);
                    readBytes += (ulong)verifyBytes.LongLength;
                }
                verifyBytes = new byte[blockHeader.cmpLength - readBytes];
                imageStream.Read(verifyBytes, 0, verifyBytes.Length);
                crcVerify.Update(verifyBytes);
                verifyCrc = crcVerify.Final();
                if (BitConverter.ToUInt64(verifyCrc, 0) != blockHeader.cmpCrc64) {
                    DicConsole.DebugWriteLine("DiscImageChef format plugin", "Expected block CRC {0:X16} but got {1:X16}", blockHeader.cmpCrc64, BitConverter.ToUInt64(verifyCrc, 0));
                    return false;
                }
                break;
            case BlockType.DeDuplicationTable:
                structureBytes = new byte[Marshal.SizeOf<DdtHeader>()];
                imageStream.Read(structureBytes, 0, structureBytes.Length);
                DdtHeader ddtHeader = Marshal.SpanToStructureLittleEndian<DdtHeader>(structureBytes);
                crcVerify = new Crc64Context();
                readBytes = 0;
                DicConsole.DebugWriteLine("DiscImageChef format plugin", "Verifying deduplication table type {0} at position {1}", entry.dataType, entry.offset);
                while (readBytes + VERIFY_SIZE < ddtHeader.cmpLength) {
                    verifyBytes = new byte[VERIFY_SIZE]; // read in fixed-size chunks, as in the data block case above
                    imageStream.Read(verifyBytes, 0, verifyBytes.Length);
                    crcVerify.Update(verifyBytes);
                    readBytes += (ulong)verifyBytes.LongLength;
                }
                verifyBytes = new byte[ddtHeader.cmpLength - readBytes];
                imageStream.Read(verifyBytes, 0, verifyBytes.Length);
                crcVerify.Update(verifyBytes);
                verifyCrc = crcVerify.Final();
                if (BitConverter.ToUInt64(verifyCrc, 0) != ddtHeader.cmpCrc64) {
                    DicConsole.DebugWriteLine("DiscImageChef format plugin", "Expected DDT CRC {0:X16} but got {1:X16}", ddtHeader.cmpCrc64, BitConverter.ToUInt64(verifyCrc, 0));
                    return false;
                }
                break;
            case BlockType.TracksBlock:
                structureBytes = new byte[Marshal.SizeOf<TracksHeader>()];
                imageStream.Read(structureBytes, 0, structureBytes.Length);
                TracksHeader trkHeader = Marshal.SpanToStructureLittleEndian<TracksHeader>(structureBytes);
                DicConsole.DebugWriteLine("DiscImageChef format plugin", "Track block at {0} contains {1} entries", entry.offset, trkHeader.entries);
                structureBytes = new byte[Marshal.SizeOf<TrackEntry>() * trkHeader.entries];
                imageStream.Read(structureBytes, 0, structureBytes.Length);
                Crc64Context.Data(structureBytes, out verifyCrc);
                if (BitConverter.ToUInt64(verifyCrc, 0) != trkHeader.crc64) {
                    DicConsole.DebugWriteLine("DiscImageChef format plugin", "Expected tracks block CRC {0:X16} but got {1:X16}", trkHeader.crc64, BitConverter.ToUInt64(verifyCrc, 0));
                    return false;
                }
                break;
            default:
                DicConsole.DebugWriteLine("DiscImageChef format plugin", "Ignored field type {0}", entry.blockType);
                break;
        }
    }
    return true;
}
/// <summary>
/// deserializes the prediction cache associated with a particular index entry (reference seq)
/// </summary>
public static PredictionCache Read(BinaryReader reader, Prediction.Entry[] lookupTable, IndexEntry indexEntry, IFileHeader header) {
    var predictions = new Prediction[indexEntry.Count];
    for (int i = 0; i < indexEntry.Count; i++) {
        predictions[i] = Prediction.Read(reader, lookupTable);
    }
    return new PredictionCache(header, lookupTable, predictions);
}
public IEnumerable <Tuple <ulong, MemoryStream> > ReadFile(Locales locales = Locales.enUS) { foreach (var entry in rootFile.Entries) { var rootEntries = rootFile[entry.Key]; for (var i = 0; i < rootEntries.Length; i++) { if ((rootEntries[i].Locales & locales) == locales) { var encodingEntry = encodingFile[rootEntries[i].MD5]; if (encodingEntry.Size != 0 && encodingEntry.Keys.Length > 0) { MemoryStream blteStream = null; for (var j = 0; j < 0x10; j++) { IndexEntry idxEntry = default(IndexEntry); foreach (var k in encodingEntry.Keys) { if ((idxEntry = idxFiles[j][k.Slice(0, 9)]).Size != 0) { var dataFile = dataFiles[idxEntry.Index]; if (dataFile == null) { throw new InvalidOperationException("Invalid data file."); } yield return(Tuple.Create(entry.Key, blteStream = DataFile.LoadBLTEEntry(idxEntry, dataFile.readStream))); } } if (idxEntry.Size != 0) { break; } } if (blteStream == null) { for (var j = 0x10; j < idxFiles.Count; j++) { IndexEntry idxEntry = default(IndexEntry); foreach (var k in encodingEntry.Keys) { if ((idxEntry = idxFiles[j][k]).Size != 0) { yield return(Tuple.Create(entry.Key, DataFile.LoadBLTEEntry(idxEntry, cdnConfig.DownloadFile(indexFiles[idxEntry.Index], idxEntry)))); } } if (idxEntry.Size != 0) { break; } } } } } } } //return null; }
private void WeatherSystemTagGroup_CreateResources(ITagGroup guerillaTagGroup, IndexEntry entry, BinaryWriter writer, ref TagGroupHeader header) { List <long> addresses = new List <long>(); List <byte[]> buffers = new List <byte[]>(); // TODO: get weather system resources foreach (ITagBlock lightmapGeometrySectionBlock in ((BlockField)guerillaTagGroup[0][0]).BlockList) { ITagBlock geometry = (ITagBlock)lightmapGeometrySectionBlock[1].Value; int address = (int)geometry[1].Value; if (entry.Resources.TryGetResource(address, out var resource)) { header.TagResourceCount++; addresses.Add(address); buffers.Add(resource.GetBuffer()); } } if (header.TagResourceCount > 0) { header.RawOffsetsOffset = (uint)writer.BaseStream.Position; for (int i = 0; i < header.TagResourceCount; i++) { writer.Write((int)addresses[i]); } header.RawLengthsOffset = (uint)writer.BaseStream.Position; for (int i = 0; i < header.TagResourceCount; i++) { writer.Write(buffers[i].Length); } header.RawDataOffset = (uint)writer.BaseStream.Position; for (int i = 0; i < header.TagResourceCount; i++) { writer.Write(buffers[i]); } } }
private void DecoratorSetTagGroup_CreateResources(ITagGroup guerillaTagGroup, IndexEntry entry, BinaryWriter writer, ref TagGroupHeader header) { List <long> addresses = new List <long>(); List <byte[]> buffers = new List <byte[]>(); ITagBlock geometry = (ITagBlock)guerillaTagGroup[0][8].Value; int address = (int)geometry[1].Value; if (entry.Resources.TryGetResource(address, out var resource)) { header.TagResourceCount++; addresses.Add(address); buffers.Add(resource.GetBuffer()); } if (header.TagResourceCount > 0) { header.RawOffsetsOffset = (uint)writer.BaseStream.Position; for (int i = 0; i < header.TagResourceCount; i++) { writer.Write((int)addresses[i]); } header.RawLengthsOffset = (uint)writer.BaseStream.Position; for (int i = 0; i < header.TagResourceCount; i++) { writer.Write(buffers[i].Length); } header.RawDataOffset = (uint)writer.BaseStream.Position; for (int i = 0; i < header.TagResourceCount; i++) { writer.Write(buffers[i]); } } }
private void ScenarioStructureBspTagGroup_CreateResources(ITagGroup guerillaTagGroup, IndexEntry entry, BinaryWriter writer, ref TagGroupHeader header) { List <long> addresses = new List <long>(); List <byte[]> buffers = new List <byte[]>(); foreach (ITagBlock structureBspCluster in ((BlockField)guerillaTagGroup[0][19]).BlockList) { ITagBlock geometry = (ITagBlock)structureBspCluster[1].Value; int address = (int)geometry[1].Value; if (entry.Resources.TryGetResource(address, out var resource)) { header.TagResourceCount++; addresses.Add(address); buffers.Add(resource.GetBuffer()); } } foreach (ITagBlock structureBspInstancedGeometryDefinition in ((BlockField)guerillaTagGroup[0][39]).BlockList) { ITagBlock renderInfo = (ITagBlock)structureBspInstancedGeometryDefinition[0].Value; ITagBlock geometry = (ITagBlock)renderInfo[1].Value; int address = (int)geometry[1].Value; if (entry.Resources.TryGetResource(address, out var resource)) { header.TagResourceCount++; addresses.Add(address); buffers.Add(resource.GetBuffer()); } } foreach (ITagBlock structureBspWaterDefinition in ((BlockField)guerillaTagGroup[0][50]).BlockList) { ITagBlock geometry = (ITagBlock)structureBspWaterDefinition[2].Value; int address = (int)geometry[1].Value; if (entry.Resources.TryGetResource(address, out var resource)) { header.TagResourceCount++; addresses.Add(address); buffers.Add(resource.GetBuffer()); } } foreach (ITagBlock decoratorPlacementDefinition in ((BlockField)guerillaTagGroup[0][54]).BlockList) { foreach (ITagBlock decoratorCacheBlock in ((BlockField)decoratorPlacementDefinition[2]).BlockList) { ITagBlock geometry = (ITagBlock)decoratorCacheBlock[0].Value; int address = (int)geometry[1].Value; if (entry.Resources.TryGetResource(address, out var resource)) { header.TagResourceCount++; addresses.Add(address); buffers.Add(resource.GetBuffer()); } } } if (header.TagResourceCount > 0) { header.RawOffsetsOffset = (uint)writer.BaseStream.Position; for (int i = 0; i < header.TagResourceCount; i++) { writer.Write((int)addresses[i]); } header.RawLengthsOffset = (uint)writer.BaseStream.Position; for (int i = 0; i < header.TagResourceCount; i++) { writer.Write(buffers[i].Length); } header.RawDataOffset = (uint)writer.BaseStream.Position; for (int i = 0; i < header.TagResourceCount; i++) { writer.Write(buffers[i]); } } }
public BinaryReader DownloadFile(string archive, IndexEntry indexEntry) { throw new NotImplementedException("Client data folder is incomplete."); }
private void ScenarioStructureLightmapTagGroup_CreateResources(ITagGroup guerillaTagGroup, IndexEntry entry, BinaryWriter writer, ref TagGroupHeader header) { List <long> addresses = new List <long>(); List <byte[]> buffers = new List <byte[]>(); foreach (ITagBlock structureLightmapGroup in ((BlockField)guerillaTagGroup[0][16]).BlockList) { foreach (ITagBlock cluster in ((BlockField)structureLightmapGroup[6]).BlockList) { ITagBlock geometry = (ITagBlock)cluster[1].Value; int address = (int)geometry[1].Value; if (entry.Resources.TryGetResource(address, out var resource)) { header.TagResourceCount++; addresses.Add(address); buffers.Add(resource.GetBuffer()); } } foreach (ITagBlock lightmapGeometrySectionBlock in ((BlockField)structureLightmapGroup[8]).BlockList) { ITagBlock geometry = (ITagBlock)lightmapGeometrySectionBlock[1].Value; int address = (int)geometry[1].Value; if (entry.Resources.TryGetResource(address, out var resource)) { header.TagResourceCount++; addresses.Add(address); buffers.Add(resource.GetBuffer()); } } foreach (ITagBlock lightmapVertexBufferBucket in ((BlockField)structureLightmapGroup[10]).BlockList) { ITagBlock geometry = (ITagBlock)lightmapVertexBufferBucket[3].Value; int address = (int)geometry[1].Value; if (entry.Resources.TryGetResource(address, out var resource)) { header.TagResourceCount++; addresses.Add(address); buffers.Add(resource.GetBuffer()); } } } if (header.TagResourceCount > 0) { header.RawOffsetsOffset = (uint)writer.BaseStream.Position; for (int i = 0; i < header.TagResourceCount; i++) { writer.Write((int)addresses[i]); } header.RawLengthsOffset = (uint)writer.BaseStream.Position; for (int i = 0; i < header.TagResourceCount; i++) { writer.Write(buffers[i].Length); } header.RawDataOffset = (uint)writer.BaseStream.Position; for (int i = 0; i < header.TagResourceCount; i++) { writer.Write(buffers[i]); } } }
public bool TryGetOldestEntry(uint stream, out IndexEntry entry) { entry = InvalidIndexEntry; return(false); }