/// <summary>
/// Reading ctor: deserializes a node chunk from a binary resource stream.
/// </summary>
/// <param name="reader">Source of the serialized chunk; the base ctor consumes the chunk header first.</param>
/// <param name="tree">Owning XML tree; supplies the string pool used to resolve string references.</param>
/// <param name="expectedType">Chunk type the base ctor validates the stream against.</param>
protected Node(ResReader reader, XmlTree tree, ChunkTypes expectedType) : base(reader, expectedType)
{
    this.tree = tree;
    // Read order matches the serialized layout: line number, then comment ref.
    LineNumber = reader.ReadInt32();
    Comment = StringPoolRef.Read(reader, tree.StringPool);
}
/// <summary>
/// Decodes the child chunks contained within this chunk's data span and
/// stores them in <c>Children</c>.
/// </summary>
/// <param name="reader">Reader positioned at the start of the child data.</param>
private void ReadChildren(DjvuReader reader)
{
    var decoded = new List<IFFChunk>();

    // Consume chunks until the end of this chunk's payload is reached.
    while (reader.Position < Offset + Length + 8)
    {
        // Chunks start on even offsets; skip a single pad byte if present.
        if (reader.Position % 2 == 1)
        {
            reader.Position++;
        }

        // Peek the four-character chunk ID, then rewind so the chunk's own
        // constructor re-reads it from the start.
        string chunkId = reader.ReadUTF8String(4);
        ChunkTypes chunkType = IFFChunk.GetChunkType(chunkId);
        reader.Position -= 4;

        IFFChunk child = IFFChunk.BuildIFFChunk(reader, Document, this, chunkType);
        if (child != null)
        {
            decoded.Add(child);
        }
    }

    Children = decoded.ToArray();
}
/// <summary>
/// Assigns the chunk set matching the current <c>chunkType</c> to
/// <c>m_usedChunkTypes</c>. A <c>NON</c> chunk type clears the set to null;
/// any unrecognized value leaves the current assignment untouched.
/// </summary>
public void SetUsedChunkType()
{
    switch (chunkType)
    {
        case eChunkTypes.NORMAL:
            m_usedChunkTypes = chunkNormal;
            break;
        case eChunkTypes.DAMAGE:
            m_usedChunkTypes = chunkDamage;
            break;
        case eChunkTypes.DEFENSE:
            m_usedChunkTypes = chunkDefense;
            break;
        case eChunkTypes.IMPASSABLE:
            m_usedChunkTypes = chunkImpassable;
            break;
        case eChunkTypes.NON:
            m_usedChunkTypes = null;
            break;
        default:
            // Unknown type: keep whatever set is currently assigned.
            break;
    }
}
/// <summary>
/// Reader ctor: reads the chunk header via the plain reader ctor, then
/// verifies the chunk has the expected type.
/// </summary>
/// <param name="reader">Source stream for the chunk header.</param>
/// <param name="expectedType">Type this chunk must have.</param>
/// <exception cref="IOException">Thrown when the type read from the stream differs from <paramref name="expectedType"/>.</exception>
protected Chunk(ResReader reader, ChunkTypes expectedType) : this(reader)
{
    if (type != expectedType)
    {
        throw new IOException(string.Format("Expected chunk of type 0x{0:X}, read 0x{1:X}.", (int)expectedType, (int)type));
    }
}
/// <summary>
/// Returns the in-memory chunks of the desired type restricted to the given
/// location range.
/// </summary>
/// <param name="desiredType">Concrete chunk type to retrieve; must not be <c>ChunkTypes.Merged</c>.</param>
/// <param name="start">One corner of the area of interest.</param>
/// <param name="end">The opposite corner of the area of interest.</param>
/// <returns>The in-memory chunk collection limited to the start/end range.</returns>
/// <exception cref="ArgumentException">Thrown when <paramref name="desiredType"/> is <c>Merged</c>.</exception>
public ChunkCollection GetChunksInMemory(ChunkTypes desiredType, Objects.Location start, Objects.Location end)
{
    // Fix: throw the specific ArgumentException for an invalid argument instead
    // of the base Exception type; existing catch (Exception) handlers still match.
    if (desiredType == ChunkTypes.Merged)
    {
        throw new ArgumentException("desiredType can not be Merged", "desiredType");
    }

    return this.GetChunksInMemory(desiredType).GetChunkCollection(start, end);
}
/// <summary>
/// Requires that the current chunk's type matches <paramref name="type"/> and
/// consumes ("eats") it, advancing to the next chunk.
/// </summary>
/// <param name="type">Expected type of the current chunk.</param>
/// <returns>The consumed chunk.</returns>
ExpressionChunk Eat(ChunkTypes type)
{
    if (!Is(type))
    {
        throw new SyntaxException("Expected " + type, 0);
    }

    ExpressionChunk consumed = _source[_chunkPosition];
    MoveNext();
    return consumed;
}
/// <summary>
/// Returns the chunks of the desired type for the given area, starting from
/// what is already in memory and filling any gaps from map files on disk.
/// </summary>
/// <param name="desiredType">Concrete chunk type to retrieve; must not be <c>ChunkTypes.Merged</c>.</param>
/// <param name="start">One corner of the area of interest.</param>
/// <param name="end">The opposite corner of the area of interest.</param>
/// <returns>In-memory chunks supplemented with file-backed chunks where missing; if nothing is in memory at all, purely file-backed chunks.</returns>
/// <exception cref="ArgumentException">Thrown when <paramref name="desiredType"/> is <c>Merged</c>.</exception>
public ChunkCollection GetCombinedChunks(ChunkTypes desiredType, Objects.Location start, Objects.Location end)
{
    // Fix: throw the specific ArgumentException for an invalid argument instead
    // of the base Exception type; existing catch (Exception) handlers still match.
    if (desiredType == ChunkTypes.Merged)
    {
        throw new ArgumentException("desiredType can not be Merged", "desiredType");
    }

    ChunkCollection chunks = this.GetChunksInMemory(desiredType, start, end);
    if (chunks.IsEmpty())
    {
        // Nothing loaded for this area: fall back entirely to files.
        return this.GetChunksFromFiles(desiredType, start, end);
    }

    // Normalize the two corners into an axis-aligned min/max pair, snapped to
    // the minimap grid.
    Objects.Location minimapStart = this.GetAlignedMiniMapLocation(new Objects.Location(Math.Min(start.X, end.X), Math.Min(start.Y, end.Y), Math.Min(start.Z, end.Z)));
    Objects.Location minimapEnd = this.GetAlignedMiniMapLocation(new Objects.Location(Math.Max(start.X, end.X), Math.Max(start.Y, end.Y), Math.Max(start.Z, end.Z)));

    // Add chunks from files where memory has no chunk for a grid cell.
    for (int z = 0; z <= minimapEnd.Z - minimapStart.Z; z++)
    {
        for (int y = 0; y <= minimapEnd.Y - minimapStart.Y; y++)
        {
            for (int x = 0; x <= minimapEnd.X - minimapStart.X; x++)
            {
                Objects.Location loc = minimapStart.Offset(x, y, z);
                if (chunks.GetChunkFromMiniMapLocation(loc) != null)
                {
                    continue; // chunk found in memory, skip
                }
                FileInfo fi = this.GetMapFile(loc);
                if (!fi.Exists)
                {
                    continue; // no map file for this cell either
                }
                switch (desiredType)
                {
                    case ChunkTypes.Cached:
                        chunks.AddChunk(new CachedChunk(this.Client, fi));
                        break;
                    case ChunkTypes.Fast:
                        chunks.AddChunk(new FastChunk(this.Client, fi));
                        break;
                }
            }
        }
    }
    return chunks;
}
/// <summary>
/// Walks the client's minimap memory entries and builds a chunk collection of
/// the desired type.
/// </summary>
/// <param name="desiredType">Concrete chunk type to construct; must not be <c>ChunkTypes.Merged</c>.</param>
/// <param name="considerUnloadedChunks">When false (default), entries whose X distance equals the client's not-yet-loaded sentinel are skipped.</param>
/// <returns>A collection of chunks for the loaded (or all, if requested) minimap entries.</returns>
/// <exception cref="ArgumentException">Thrown when <paramref name="desiredType"/> is <c>Merged</c>.</exception>
public ChunkCollection GetChunksInMemory(ChunkTypes desiredType, bool considerUnloadedChunks = false)
{
    // Fix: throw the specific ArgumentException for an invalid argument instead
    // of the base Exception type; existing catch (Exception) handlers still match.
    if (desiredType == ChunkTypes.Merged)
    {
        throw new ArgumentException("desiredType can not be Merged", "desiredType");
    }

    var chunks = new List<IChunk>(this.Client.Addresses.MiniMap.MaxEntries);
    int index = -1;
    for (int i = this.Client.Addresses.MiniMap.Start; i < this.Client.Addresses.MiniMap.End; i += this.Client.Addresses.MiniMap.Step)
    {
        index++;
        if (!considerUnloadedChunks)
        {
            ushort x = this.Client.Memory.ReadUInt16(i + this.Client.Addresses.MiniMap.Distances.X);
            if (x == this.Client.Addresses.MiniMap.NotYetLoadedValue)
            {
                continue; // chunk not loaded yet
            }
        }
        switch (desiredType)
        {
            case ChunkTypes.Cached:
                chunks.Add(new CachedChunk(this.Client, i));
                break;
            case ChunkTypes.Fast:
                chunks.Add(new FastChunk(this.Client, i));
                break;
        }
        //this.CachedChunksInMemory[index].UpdateData();
        //chunks.Add(this.CachedChunksInMemory[index]);
    }
    return new ChunkCollection(this.Client, chunks);
}
/// <summary>
/// Builds a chunk collection of the desired type backed by the map files
/// covering the given area.
/// </summary>
/// <param name="desiredType">Concrete chunk type to construct; must not be <c>ChunkTypes.Merged</c>.</param>
/// <param name="start">One corner of the area of interest.</param>
/// <param name="end">The opposite corner of the area of interest.</param>
/// <returns>A collection with one chunk per matching map file.</returns>
/// <exception cref="ArgumentException">Thrown when <paramref name="desiredType"/> is <c>Merged</c>.</exception>
public ChunkCollection GetChunksFromFiles(ChunkTypes desiredType, Objects.Location start, Objects.Location end)
{
    // Fix: throw the specific ArgumentException for an invalid argument instead
    // of the base Exception type; existing catch (Exception) handlers still match.
    if (desiredType == ChunkTypes.Merged)
    {
        throw new ArgumentException("desiredType can not be Merged", "desiredType");
    }

    var chunks = new List<IChunk>();
    foreach (FileInfo fi in this.GetMapFiles(start, end))
    {
        switch (desiredType)
        {
            case ChunkTypes.Cached:
                chunks.Add(new CachedChunk(this.Client, fi));
                break;
            case ChunkTypes.Fast:
                chunks.Add(new FastChunk(this.Client, fi));
                break;
        }
    }
    return new ChunkCollection(this.Client, chunks);
}
/// <summary>
/// Creates an expression chunk carrying an explicit numeric value.
/// </summary>
/// <param name="originalText">The source text this chunk was parsed from.</param>
/// <param name="type">The chunk's type tag.</param>
/// <param name="value">The numeric value the chunk represents.</param>
public ExpressionChunk(string originalText, ChunkTypes type, double value)
{
    OriginalText = originalText;
    ChunkType = type;
    Value = value;
}
/// <summary>
/// Wraps a chunk type into an <c>ItemTypeInfo</c> describing a chunk object.
/// </summary>
/// <param name="type">Chunk type to encode; stored as the info's integer payload.</param>
/// <returns>An ItemTypeInfo with object type Chunk and the given type's value.</returns>
public static ItemTypeInfo ITIFromChunk(ChunkTypes type)
{
    return new ItemTypeInfo(ObjectTypes.Chunk, (int)type);
}
/// <summary>
/// Parses an SA1MDL/SA2MDL model from an in-memory byte buffer. Reads the
/// header (magic + version), the label/metadata tables (layout depends on
/// version), the model object tree, and — when a filename is supplied — the
/// animation files referenced next to it.
/// </summary>
/// <param name="file">Raw bytes of the model file.</param>
/// <param name="filename">Optional on-disk path; used only to resolve sibling animation files.</param>
/// <exception cref="FormatException">Thrown when the magic or version does not identify a supported model file.</exception>
public ModelFile(byte[] file, string filename = null)
{
    int tmpaddr;
    // The format is little-endian; save and restore the global endian flag.
    bool be = ByteConverter.BigEndian;
    ByteConverter.BigEndian = false;
    ulong magic = ByteConverter.ToUInt64(file, 0) & FormatMask;
    byte version = file[7];
    if (version > CurrentVersion)
    {
        throw new FormatException("Not a valid SA1MDL/SA2MDL file.");
    }
    Metadata = new Dictionary <uint, byte[]>();
    Dictionary <int, string> labels = new Dictionary <int, string>();
    Dictionary <int, Attach> attaches = new Dictionary <int, Attach>();
    if (version < 2)
    {
        // Legacy layout: version 1 stores a label table pointer at 0x14;
        // entries are (address, name-pointer) pairs terminated by -1.
        if (version == 1)
        {
            tmpaddr = ByteConverter.ToInt32(file, 0x14);
            if (tmpaddr != 0)
            {
                int addr = ByteConverter.ToInt32(file, tmpaddr);
                while (addr != -1)
                {
                    labels.Add(addr, file.GetCString(ByteConverter.ToInt32(file, tmpaddr + 4)));
                    tmpaddr += 8;
                    addr = ByteConverter.ToInt32(file, tmpaddr);
                }
            }
        }
        switch (magic)
        {
            case SA1MDL: Format = ModelFormat.Basic; break;
            case SA2MDL: Format = ModelFormat.Chunk; break;
            default: throw new FormatException("Not a valid SA1MDL/SA2MDL file.");
        }
        // Root object pointer lives at offset 8.
        Model = new NJS_OBJECT(file, ByteConverter.ToInt32(file, 8), 0, Format, labels, attaches);
        if (filename != null)
        {
            // Animation file list pointer at 0xC: pointers to C strings, -1 terminated.
            tmpaddr = ByteConverter.ToInt32(file, 0xC);
            if (tmpaddr != 0)
            {
                List <string> animfiles = new List <string>();
                int addr = ByteConverter.ToInt32(file, tmpaddr);
                while (addr != -1)
                {
                    animfiles.Add(file.GetCString(addr));
                    tmpaddr += 4;
                    addr = ByteConverter.ToInt32(file, tmpaddr);
                }
                animationFiles = animfiles.ToArray();
            }
            else
            {
                animationFiles = new string[0];
            }
            // Load referenced animations relative to the model file's directory;
            // best-effort: any failure clears the list rather than failing the load.
            string path = Path.GetDirectoryName(filename);
            List <NJS_MOTION> anims = new List <NJS_MOTION>();
            try
            {
                foreach (string item in animationFiles)
                {
                    anims.Add(NJS_MOTION.Load(Path.Combine(path, item), Model.CountAnimated()));
                }
            }
            catch
            {
                anims.Clear();
            }
            Animations = anims.AsReadOnly();
        }
    }
    else
    {
        // Version >= 2 layout: a chunked metadata section at the pointer
        // stored at 0xC. Each chunk is (type:u32, size:i32, payload).
        animationFiles = new string[0];
        tmpaddr = ByteConverter.ToInt32(file, 0xC);
        if (tmpaddr != 0)
        {
            bool finished = false;
            while (!finished)
            {
                ChunkTypes type = (ChunkTypes)ByteConverter.ToUInt32(file, tmpaddr);
                int chunksz = ByteConverter.ToInt32(file, tmpaddr + 4);
                int nextchunk = tmpaddr + 8 + chunksz;
                tmpaddr += 8;
                if (version == 2)
                {
                    // Version 2 reads chunk payloads in place from `file`.
                    switch (type)
                    {
                        case ChunkTypes.Label:
                            while (ByteConverter.ToInt64(file, tmpaddr) != -1)
                            {
                                labels.Add(ByteConverter.ToInt32(file, tmpaddr), file.GetCString(ByteConverter.ToInt32(file, tmpaddr + 4)));
                                tmpaddr += 8;
                            }
                            break;
                        case ChunkTypes.Animation:
                            List <string> animfiles = new List <string>();
                            while (ByteConverter.ToInt32(file, tmpaddr) != -1)
                            {
                                animfiles.Add(file.GetCString(ByteConverter.ToInt32(file, tmpaddr)));
                                tmpaddr += 4;
                            }
                            animationFiles = animfiles.ToArray();
                            break;
                        case ChunkTypes.Morph: break;
                        case ChunkTypes.Author: Author = file.GetCString(tmpaddr); break;
                        case ChunkTypes.Tool: break;
                        case ChunkTypes.Description: Description = file.GetCString(tmpaddr); break;
                        case ChunkTypes.Texture: break;
                        case ChunkTypes.End: finished = true; break;
                    }
                }
                else
                {
                    // Version 3+: each chunk payload is copied out and parsed
                    // from its own buffer; unknown chunk types are preserved
                    // verbatim in Metadata.
                    byte[] chunk = new byte[chunksz];
                    Array.Copy(file, tmpaddr, chunk, 0, chunksz);
                    int chunkaddr = 0;
                    switch (type)
                    {
                        case ChunkTypes.Label:
                            while (ByteConverter.ToInt64(chunk, chunkaddr) != -1)
                            {
                                labels.Add(ByteConverter.ToInt32(chunk, chunkaddr), chunk.GetCString(ByteConverter.ToInt32(chunk, chunkaddr + 4)));
                                chunkaddr += 8;
                            }
                            break;
                        case ChunkTypes.Animation:
                            List <string> animchunks = new List <string>();
                            while (ByteConverter.ToInt32(chunk, chunkaddr) != -1)
                            {
                                animchunks.Add(chunk.GetCString(ByteConverter.ToInt32(chunk, chunkaddr)));
                                chunkaddr += 4;
                            }
                            animationFiles = animchunks.ToArray();
                            break;
                        case ChunkTypes.Morph: break;
                        case ChunkTypes.Author: Author = chunk.GetCString(chunkaddr); break;
                        case ChunkTypes.Tool: break;
                        case ChunkTypes.Description: Description = chunk.GetCString(chunkaddr); break;
                        case ChunkTypes.End: finished = true; break;
                        default: Metadata.Add((uint)type, chunk); break;
                    }
                }
                tmpaddr = nextchunk;
            }
        }
        switch (magic)
        {
            case SA1MDL: Format = ModelFormat.Basic; break;
            case SA2MDL: Format = ModelFormat.Chunk; break;
            case SA2BMDL: Format = ModelFormat.GC; break;
            case XJMDL: Format = ModelFormat.XJ; break;
            default: throw new FormatException("Not a valid SA1MDL/SA2MDL file.");
        }
        Model = new NJS_OBJECT(file, ByteConverter.ToInt32(file, 8), 0, Format, labels, attaches);
        if (filename != null)
        {
            string path = Path.GetDirectoryName(filename);
            // An optional sibling ".action" text file can override the
            // animation list; keep only lines naming files that exist.
            if (File.Exists(Path.GetFileNameWithoutExtension(filename) + ".action"))
            {
                using (TextReader tr = File.OpenText(Path.GetFileNameWithoutExtension(filename) + ".action"))
                {
                    List <string> animlist = new List <string>();
                    int count = File.ReadLines(Path.GetFileNameWithoutExtension(filename) + ".action").Count();
                    for (int i = 0; i < count; i++)
                    {
                        string line = tr.ReadLine();
                        if (File.Exists(Path.Combine(path, line)))
                        {
                            animlist.Add(line);
                        }
                    }
                    animationFiles = animlist.ToArray();
                }
            }
            // Load animations; .json files go through JSON deserialization,
            // everything else through the binary loader. Best-effort: any
            // failure clears the whole list.
            List <NJS_MOTION> anims = new List <NJS_MOTION>();
            try
            {
                foreach (string item in animationFiles)
                {
                    if (Path.GetExtension(item).ToLowerInvariant() == ".json")
                    {
                        JsonSerializer js = new JsonSerializer() { Culture = System.Globalization.CultureInfo.InvariantCulture };
                        using (TextReader tr = File.OpenText(Path.Combine(path, item)))
                        {
                            using (JsonTextReader jtr = new JsonTextReader(tr)) anims.Add(js.Deserialize <NJS_MOTION>(jtr));
                        }
                    }
                    else
                    {
                        anims.Add(NJS_MOTION.Load(Path.Combine(path, item), Model.CountAnimated()));
                    }
                }
            }
            catch
            {
                anims.Clear();
            }
            Animations = anims.AsReadOnly();
        }
    }
    // Restore the caller's endian setting.
    ByteConverter.BigEndian = be;
}
/// <summary>
/// Builds the concrete chunk instance appropriate for the given chunk type.
/// </summary>
/// <param name="reader">Reader positioned at the chunk to decode.</param>
/// <param name="rootDocument">Document that owns the chunk.</param>
/// <param name="parent">Parent chunk, if any.</param>
/// <param name="chunkType">Type tag selecting the concrete chunk class.</param>
/// <returns>The constructed chunk; unrecognized types yield an <c>UnknownChunk</c>.</returns>
public static IFFChunk BuildIFFChunk(DjvuReader reader, DjvuDocument rootDocument, IFFChunk parent, ChunkTypes chunkType)
{
    IFFChunk result;

    switch (chunkType)
    {
        case ChunkTypes.Form:
            result = new FormChunk(reader, parent, rootDocument);
            break;
        case ChunkTypes.Form_Djvm:
            result = new DjvmChunk(reader, parent, rootDocument);
            break;
        case ChunkTypes.Form_Djvu:
            result = new DjvuChunk(reader, parent, rootDocument);
            break;
        case ChunkTypes.Form_Djvi:
            result = new DjviChunk(reader, parent, rootDocument);
            break;
        case ChunkTypes.Form_Thum:
            result = new ThumChunk(reader, parent, rootDocument);
            break;
        case ChunkTypes.Dirm:
            result = new DirmChunk(reader, parent, rootDocument);
            break;
        case ChunkTypes.Navm:
            result = new NavmChunk(reader, parent, rootDocument);
            break;
        case ChunkTypes.Anta:
            result = new AntaChunk(reader, parent, rootDocument);
            break;
        case ChunkTypes.Antz:
            result = new AntzChunk(reader, parent, rootDocument);
            break;
        case ChunkTypes.Txta:
            result = new TxtaChunk(reader, parent, rootDocument);
            break;
        case ChunkTypes.Txtz:
            result = new TxtzChunk(reader, parent, rootDocument);
            break;
        case ChunkTypes.Djbz:
            result = new DjbzChunk(reader, parent, rootDocument);
            break;
        case ChunkTypes.Sjbz:
            result = new SjbzChunk(reader, parent, rootDocument);
            break;
        case ChunkTypes.FG44:
            result = new FG44Chunk(reader, parent, rootDocument);
            break;
        case ChunkTypes.BG44:
            result = new BG44Chunk(reader, parent, rootDocument);
            break;
        case ChunkTypes.TH44:
            result = new TH44Chunk(reader, parent, rootDocument);
            break;
        case ChunkTypes.WMRM:
            result = new WmrmChunk(reader, parent, rootDocument);
            break;
        case ChunkTypes.FGbz:
            result = new FGbzChunk(reader, parent, rootDocument);
            break;
        case ChunkTypes.Info:
            result = new InfoChunk(reader, parent, rootDocument);
            break;
        case ChunkTypes.Incl:
            result = new InclChunk(reader, parent, rootDocument);
            break;
        case ChunkTypes.BGjp:
            result = new BGjpChunk(reader, parent, rootDocument);
            break;
        case ChunkTypes.FGjp:
            result = new FGjpChunk(reader, parent, rootDocument);
            break;
        case ChunkTypes.Smmr:
            result = new SmmrChunk(reader, parent, rootDocument);
            break;
        default:
            result = new UnknownChunk(reader, parent, rootDocument);
            break;
    }

    return result;
}
/// <summary>
/// Creation ctor: builds a new in-memory node (not deserialized from a stream)
/// belonging to the given tree.
/// </summary>
/// <param name="tree">Owning XML tree.</param>
/// <param name="expectedType">Chunk type recorded by the base ctor.</param>
protected Node(XmlTree tree, ChunkTypes expectedType) : base(expectedType)
{
    this.tree = tree;
}
/// <summary>
/// Creates a new report chunk with the given name by delegating to the
/// rendering context, translating the local chunk-type enum first.
/// </summary>
/// <param name="type">Local chunk type; cast to the report-processing enum.</param>
/// <param name="chunkName">Name of the chunk to create.</param>
/// <returns>The stream for the newly created chunk.</returns>
public Stream CreateChunk(ChunkTypes type, string chunkName)
{
    var reportChunkType = (Microsoft.ReportingServices.ReportProcessing.ReportProcessing.ReportChunkTypes)type;
    return m_renderingContext.CreateChunk(reportChunkType, chunkName);
}
/// <summary>
/// Creation ctor: builds a new in-memory element (not deserialized from a
/// stream) belonging to the given tree.
/// </summary>
/// <param name="tree">Owning XML tree.</param>
/// <param name="expectedType">Chunk type recorded by the base ctor.</param>
protected Element(XmlTree tree, ChunkTypes expectedType) : base(tree, expectedType)
{
}
/// <summary>
/// Reader ctor: reads the common chunk header from the stream.
/// </summary>
/// <param name="reader">Source of the serialized chunk header.</param>
protected Chunk(ResReader reader)
{
    // Header layout: u16 type, u16 header size, i32 total size.
    type = (ChunkTypes)reader.ReadUInt16();
    headerSize = reader.ReadUInt16();
    size = reader.ReadInt32();
}
/// <summary>
/// Creation ctor: builds a new in-memory namespace node (not deserialized
/// from a stream) belonging to the given tree.
/// </summary>
/// <param name="tree">Owning XML tree.</param>
/// <param name="expectedType">Chunk type recorded by the base ctor.</param>
protected NamespaceNode(XmlTree tree, ChunkTypes expectedType) : base(tree, expectedType)
{
}
/// <summary>
/// Reading ctor: deserializes a namespace node, resolving its prefix and URI
/// through the tree's string pool.
/// </summary>
/// <param name="reader">Source of the serialized chunk data.</param>
/// <param name="tree">Owning XML tree; supplies the string pool.</param>
/// <param name="expectedType">Chunk type the base ctor validates against.</param>
protected NamespaceNode(ResReader reader, XmlTree tree, ChunkTypes expectedType) : base(reader, tree, expectedType)
{
    // Read order matches the serialized layout: prefix first, then URI.
    Prefix = StringPoolRef.Read(reader, tree.StringPool);
    Uri = StringPoolRef.Read(reader, tree.StringPool);
}
/// <summary>
/// Gets the named report chunk, creating it when it does not yet exist.
/// </summary>
/// <param name="type">Local chunk type; cast to the report-processing enum.</param>
/// <param name="chunkName">Name of the chunk.</param>
/// <param name="isNewChunk">Set by the underlying call to indicate whether the chunk was newly created.</param>
/// <returns>The stream for the chunk.</returns>
public Stream GetOrCreateChunk(ChunkTypes type, string chunkName, out bool isNewChunk)
{
    var reportChunkType = (AspNetCore.ReportingServices.ReportProcessing.ReportProcessing.ReportChunkTypes)type;
    return this.m_renderingContext.GetOrCreateChunk(reportChunkType, chunkName, true, out isNewChunk);
}
/// <summary>
/// Creates a new report chunk with the given name by delegating to the
/// rendering context, translating the local chunk-type enum first.
/// </summary>
/// <param name="type">Local chunk type; cast to the report-processing enum.</param>
/// <param name="chunkName">Name of the chunk to create.</param>
/// <returns>The stream for the newly created chunk.</returns>
public Stream CreateChunk(ChunkTypes type, string chunkName)
{
    var reportChunkType = (AspNetCore.ReportingServices.ReportProcessing.ReportProcessing.ReportChunkTypes)type;
    return this.m_renderingContext.CreateChunk(reportChunkType, chunkName);
}
/// <summary>
/// Retrieves an existing report chunk by name without creating it
/// (delegates with createChunkIfNotExists: false).
/// </summary>
/// <param name="type">Local chunk type; cast to the report-processing enum.</param>
/// <param name="chunkName">Name of the chunk to retrieve.</param>
/// <returns>The stream returned by the underlying rendering context.</returns>
public Stream GetChunk(ChunkTypes type, string chunkName)
{
    var reportChunkType = (Microsoft.ReportingServices.ReportProcessing.ReportProcessing.ReportChunkTypes)type;
    bool created;
    return m_renderingContext.GetOrCreateChunk(reportChunkType, chunkName, createChunkIfNotExists: false, out created);
}
/// <summary>
/// Creates an expression chunk without an explicit numeric value;
/// <c>Value</c> defaults to zero.
/// </summary>
/// <param name="originalText">The source text this chunk was parsed from.</param>
/// <param name="type">The chunk's type tag.</param>
public ExpressionChunk(string originalText, ChunkTypes type)
{
    OriginalText = originalText;
    ChunkType = type;
    Value = 0.0;
}
/// <summary>
/// Reading ctor: deserializes an element, resolving its namespace and name
/// through the tree's string pool.
/// </summary>
/// <param name="reader">Source of the serialized chunk data.</param>
/// <param name="tree">Owning XML tree; supplies the string pool.</param>
/// <param name="expectedType">Chunk type the base ctor validates against.</param>
protected Element(ResReader reader, XmlTree tree, ChunkTypes expectedType) : base(reader, tree, expectedType)
{
    // Read order matches the serialized layout: namespace first, then name.
    Namespace = StringPoolRef.Read(reader, tree.StringPool);
    Name = StringPoolRef.Read(reader, tree.StringPool);
}
/// <summary>
/// Checks the type of the current chunk, guarding against reading past the
/// end of the source. Exists purely to simplify the grammar expressions and
/// cut down on temporaries.
/// </summary>
/// <param name="type">Type to test the current chunk against.</param>
/// <returns>True when a current chunk exists and its type matches.</returns>
bool Is(ChunkTypes type)
{
    if (_chunkPosition >= _source.Count)
    {
        return false;
    }
    return _source[_chunkPosition].ChunkType == type;
}
/// <summary>
/// Retrieves an existing report chunk by name without creating it
/// (delegates with createChunkIfNotExists = false).
/// </summary>
/// <param name="type">Local chunk type; cast to the report-processing enum.</param>
/// <param name="chunkName">Name of the chunk to retrieve.</param>
/// <returns>The stream returned by the underlying rendering context.</returns>
public Stream GetChunk(ChunkTypes type, string chunkName)
{
    // Fix: dropped the redundant `= default(bool)` initializer — an `out`
    // argument is always assigned by the callee and needs no initialization.
    bool isNewChunk;
    return this.m_renderingContext.GetOrCreateChunk((AspNetCore.ReportingServices.ReportProcessing.ReportProcessing.ReportChunkTypes)type, chunkName, false, out isNewChunk);
}
/// <summary>
/// Checks the type of the chunk after the current one without advancing the
/// index. Exists purely to simplify the grammar expressions and cut down on
/// temporaries.
/// </summary>
/// <param name="type">Type to test the next chunk against.</param>
/// <returns>True when a next chunk exists and its type matches.</returns>
bool IsNext(ChunkTypes type)
{
    // Fix: add an explicit bounds check on the next index. The original relied
    // solely on !IsEOL, yet _chunkPosition + 1 could still fall outside
    // _source and throw out-of-range (the sibling Is() bounds-checks its
    // index). IsEOL's definition is not visible here, so the extra check is
    // defensive and preserves all in-range behavior.
    return !IsEOL && _chunkPosition + 1 < _source.Count && _source[_chunkPosition + 1].ChunkType == type;
}
/// <summary>
/// Loads and parses an SA1MDL/SA2MDL model from a file on disk. Reads the
/// header (magic + version), the label/metadata tables (layout depends on
/// version), the model object tree, and the animation/morph files referenced
/// alongside the model.
/// </summary>
/// <param name="filename">Path of the model file; also the base directory for animation/morph files.</param>
/// <exception cref="FormatException">Thrown when the magic or version does not identify a supported model file.</exception>
public ModelFile(string filename)
{
    int tmpaddr;
    // The format is little-endian; save and restore the global endian flag.
    bool be = ByteConverter.BigEndian;
    ByteConverter.BigEndian = false;
    byte[] file = File.ReadAllBytes(filename);
    ulong magic = ByteConverter.ToUInt64(file, 0) & FormatMask;
    byte version = file[7];
    if (version > CurrentVersion)
    {
        throw new FormatException("Not a valid SA1MDL/SA2MDL file.");
    }
    Metadata = new Dictionary <uint, byte[]>();
    Dictionary <int, string> labels = new Dictionary <int, string>();
    if (version < 2)
    {
        // Legacy layout: version 1 stores a label table pointer at 0x14;
        // entries are (address, name-pointer) pairs terminated by -1.
        if (version == 1)
        {
            tmpaddr = ByteConverter.ToInt32(file, 0x14);
            if (tmpaddr != 0)
            {
                int addr = ByteConverter.ToInt32(file, tmpaddr);
                while (addr != -1)
                {
                    labels.Add(addr, file.GetCString(ByteConverter.ToInt32(file, tmpaddr + 4)));
                    tmpaddr += 8;
                    addr = ByteConverter.ToInt32(file, tmpaddr);
                }
            }
        }
        switch (magic)
        {
            case SA1MDL: Format = ModelFormat.Basic; break;
            case SA2MDL: Format = ModelFormat.Chunk; break;
            default: throw new FormatException("Not a valid SA1MDL/SA2MDL file.");
        }
        // Root object pointer lives at offset 8.
        Model = new NJS_OBJECT(file, ByteConverter.ToInt32(file, 8), 0, Format, labels);
        // Animation file list pointer at 0xC: pointers to C strings, -1 terminated.
        tmpaddr = ByteConverter.ToInt32(file, 0xC);
        if (tmpaddr != 0)
        {
            List <string> animfiles = new List <string>();
            int addr = ByteConverter.ToInt32(file, tmpaddr);
            while (addr != -1)
            {
                animfiles.Add(file.GetCString(addr));
                tmpaddr += 4;
                addr = ByteConverter.ToInt32(file, tmpaddr);
            }
            animationFiles = animfiles.ToArray();
        }
        else
        {
            animationFiles = new string[0];
        }
        // Load referenced animations relative to the model file's directory.
        string path = Path.GetDirectoryName(filename);
        List <Animation> anims = new List <Animation>();
        foreach (string item in animationFiles)
        {
            anims.Add(Animation.Load(Path.Combine(path, item), Model.CountAnimated()));
        }
        Animations = anims.AsReadOnly();
        if (version == 1)
        {
            // Version 1 additionally stores a morph file list pointer at 0x10.
            tmpaddr = ByteConverter.ToInt32(file, 0x10);
            if (tmpaddr != 0)
            {
                List <string> morphfiles = new List <string>();
                int addr = ByteConverter.ToInt32(file, tmpaddr);
                while (addr != -1)
                {
                    morphfiles.Add(file.GetCString(addr));
                    tmpaddr += 4;
                    addr = ByteConverter.ToInt32(file, tmpaddr);
                }
                morphFiles = morphfiles.ToArray();
            }
            else
            {
                morphFiles = new string[0];
            }
            List <Animation> morphs = new List <Animation>();
            foreach (string item in morphFiles)
            {
                morphs.Add(Animation.Load(Path.Combine(path, item), Model.CountMorph()));
            }
            Morphs = morphs.AsReadOnly();
        }
        else
        {
            morphFiles = new string[0];
            Morphs = new ReadOnlyCollection <Animation>(new List <Animation>());
        }
    }
    else
    {
        // Version >= 2 layout: a chunked metadata section at the pointer
        // stored at 0xC. Each chunk is (type:u32, size:i32, payload).
        animationFiles = new string[0];
        morphFiles = new string[0];
        tmpaddr = ByteConverter.ToInt32(file, 0xC);
        if (tmpaddr != 0)
        {
            bool finished = false;
            while (!finished)
            {
                ChunkTypes type = (ChunkTypes)ByteConverter.ToUInt32(file, tmpaddr);
                int chunksz = ByteConverter.ToInt32(file, tmpaddr + 4);
                int nextchunk = tmpaddr + 8 + chunksz;
                tmpaddr += 8;
                if (version == 2)
                {
                    // Version 2 reads chunk payloads in place from `file`.
                    switch (type)
                    {
                        case ChunkTypes.Label:
                            while (ByteConverter.ToInt64(file, tmpaddr) != -1)
                            {
                                labels.Add(ByteConverter.ToInt32(file, tmpaddr), file.GetCString(ByteConverter.ToInt32(file, tmpaddr + 4)));
                                tmpaddr += 8;
                            }
                            break;
                        case ChunkTypes.Animation:
                            List <string> animfiles = new List <string>();
                            while (ByteConverter.ToInt32(file, tmpaddr) != -1)
                            {
                                animfiles.Add(file.GetCString(ByteConverter.ToInt32(file, tmpaddr)));
                                tmpaddr += 4;
                            }
                            animationFiles = animfiles.ToArray();
                            break;
                        case ChunkTypes.Morph:
                            List <string> morphfiles = new List <string>();
                            while (ByteConverter.ToInt32(file, tmpaddr) != -1)
                            {
                                morphfiles.Add(file.GetCString(ByteConverter.ToInt32(file, tmpaddr)));
                                tmpaddr += 4;
                            }
                            morphFiles = morphfiles.ToArray();
                            break;
                        case ChunkTypes.Author: Author = file.GetCString(tmpaddr); break;
                        case ChunkTypes.Tool: Tool = file.GetCString(tmpaddr); break;
                        case ChunkTypes.Description: Description = file.GetCString(tmpaddr); break;
                        case ChunkTypes.Texture: break;
                        case ChunkTypes.End: finished = true; break;
                    }
                }
                else
                {
                    // Version 3+: each chunk payload is copied out and parsed
                    // from its own buffer; unknown chunk types are preserved
                    // verbatim in Metadata.
                    byte[] chunk = new byte[chunksz];
                    Array.Copy(file, tmpaddr, chunk, 0, chunksz);
                    int chunkaddr = 0;
                    switch (type)
                    {
                        case ChunkTypes.Label:
                            while (ByteConverter.ToInt64(chunk, chunkaddr) != -1)
                            {
                                labels.Add(ByteConverter.ToInt32(chunk, chunkaddr), chunk.GetCString(ByteConverter.ToInt32(chunk, chunkaddr + 4)));
                                chunkaddr += 8;
                            }
                            break;
                        case ChunkTypes.Animation:
                            List <string> animchunks = new List <string>();
                            while (ByteConverter.ToInt32(chunk, chunkaddr) != -1)
                            {
                                animchunks.Add(chunk.GetCString(ByteConverter.ToInt32(chunk, chunkaddr)));
                                chunkaddr += 4;
                            }
                            animationFiles = animchunks.ToArray();
                            break;
                        case ChunkTypes.Morph:
                            List <string> morphchunks = new List <string>();
                            while (ByteConverter.ToInt32(chunk, chunkaddr) != -1)
                            {
                                morphchunks.Add(chunk.GetCString(ByteConverter.ToInt32(chunk, chunkaddr)));
                                chunkaddr += 4;
                            }
                            morphFiles = morphchunks.ToArray();
                            break;
                        case ChunkTypes.Author: Author = chunk.GetCString(chunkaddr); break;
                        case ChunkTypes.Tool: Tool = chunk.GetCString(chunkaddr); break;
                        case ChunkTypes.Description: Description = chunk.GetCString(chunkaddr); break;
                        case ChunkTypes.End: finished = true; break;
                        default: Metadata.Add((uint)type, chunk); break;
                    }
                }
                tmpaddr = nextchunk;
            }
        }
        switch (magic)
        {
            case SA1MDL: Format = ModelFormat.Basic; break;
            case SA2MDL: Format = ModelFormat.Chunk; break;
            default: throw new FormatException("Not a valid SA1MDL/SA2MDL file.");
        }
        Model = new NJS_OBJECT(file, ByteConverter.ToInt32(file, 8), 0, Format, labels);
        // Load the animation and morph files collected from the metadata chunks.
        string path = Path.GetDirectoryName(filename);
        List <Animation> anims = new List <Animation>();
        foreach (string item in animationFiles)
        {
            anims.Add(Animation.Load(Path.Combine(path, item), Model.CountAnimated()));
        }
        Animations = anims.AsReadOnly();
        List <Animation> morphs = new List <Animation>();
        foreach (string item in morphFiles)
        {
            morphs.Add(Animation.Load(Path.Combine(path, item), Model.CountMorph()));
        }
        Morphs = morphs.AsReadOnly();
    }
    // Restore the caller's endian setting.
    ByteConverter.BigEndian = be;
}
/// <summary>
/// Loads and parses an SA1LVL/SA2LVL land table from a file on disk. Reads
/// the header (magic + version), the label/metadata tables (layout depends on
/// version), then constructs the LandTable in the format matching the magic.
/// </summary>
/// <param name="filename">Path of the level file.</param>
/// <returns>The parsed land table with author/tool/description metadata attached.</returns>
/// <exception cref="FormatException">Thrown when the magic or version does not identify a supported level file.</exception>
public static LandTable LoadFromFile(string filename)
{
    // The format is little-endian; save and restore the global endian flag.
    bool be = ByteConverter.BigEndian;
    ByteConverter.BigEndian = false;
    byte[] file = File.ReadAllBytes(filename);
    ulong magic = ByteConverter.ToUInt64(file, 0) & FormatMask;
    byte version = file[7];
    if (version > CurrentVersion)
    {
        throw new FormatException("Not a valid SA1LVL/SA2LVL file.");
    }
    Dictionary <int, string> labels = new Dictionary <int, string>();
    string author = null, description = null, tool = null;
    Dictionary <uint, byte[]> meta = new Dictionary <uint, byte[]>();
    if (version < 2)
    {
        // Legacy layout: version 1 stores a label table pointer at 0xC;
        // entries are (address, name-pointer) pairs terminated by -1.
        if (version == 1)
        {
            int tmpaddr = ByteConverter.ToInt32(file, 0xC);
            if (tmpaddr != 0)
            {
                int addr = ByteConverter.ToInt32(file, tmpaddr);
                while (addr != -1)
                {
                    labels.Add(addr, file.GetCString(ByteConverter.ToInt32(file, tmpaddr + 4)));
                    tmpaddr += 8;
                    addr = ByteConverter.ToInt32(file, tmpaddr);
                }
            }
        }
    }
    else
    {
        // Version >= 2 layout: a chunked metadata section at the pointer
        // stored at 0xC. Each chunk is (type:u32, size:i32, payload).
        int tmpaddr = ByteConverter.ToInt32(file, 0xC);
        if (tmpaddr != 0)
        {
            bool finished = false;
            while (!finished)
            {
                ChunkTypes type = (ChunkTypes)ByteConverter.ToUInt32(file, tmpaddr);
                int chunksz = ByteConverter.ToInt32(file, tmpaddr + 4);
                int nextchunk = tmpaddr + 8 + chunksz;
                tmpaddr += 8;
                if (version == 2)
                {
                    // Version 2 reads chunk payloads in place from `file`.
                    switch (type)
                    {
                        case ChunkTypes.Label:
                            while (ByteConverter.ToInt64(file, tmpaddr) != -1)
                            {
                                labels.Add(ByteConverter.ToInt32(file, tmpaddr), file.GetCString(ByteConverter.ToInt32(file, tmpaddr + 4)));
                                tmpaddr += 8;
                            }
                            break;
                        case ChunkTypes.Author: author = file.GetCString(tmpaddr); break;
                        case ChunkTypes.Tool: tool = file.GetCString(tmpaddr); break;
                        case ChunkTypes.Description: description = file.GetCString(tmpaddr); break;
                        case ChunkTypes.End: finished = true; break;
                    }
                }
                else
                {
                    // Version 3+: each chunk payload is copied out and parsed
                    // from its own buffer; unknown chunk types are preserved
                    // verbatim in the metadata map.
                    byte[] chunk = new byte[chunksz];
                    Array.Copy(file, tmpaddr, chunk, 0, chunksz);
                    int chunkaddr = 0;
                    switch (type)
                    {
                        case ChunkTypes.Label:
                            while (ByteConverter.ToInt64(chunk, chunkaddr) != -1)
                            {
                                labels.Add(ByteConverter.ToInt32(chunk, chunkaddr), chunk.GetCString(ByteConverter.ToInt32(chunk, chunkaddr + 4)));
                                chunkaddr += 8;
                            }
                            break;
                        case ChunkTypes.Author: author = chunk.GetCString(0); break;
                        case ChunkTypes.Tool: tool = chunk.GetCString(0); break;
                        case ChunkTypes.Description: description = chunk.GetCString(0); break;
                        case ChunkTypes.End: finished = true; break;
                        default: meta.Add((uint)type, chunk); break;
                    }
                }
                tmpaddr = nextchunk;
            }
        }
    }
    // Construct the table in the format selected by the magic value; the
    // endian flag is restored before every exit path.
    if (magic == SA1LVL)
    {
        LandTable table = new LandTable(file, ByteConverter.ToInt32(file, 8), 0, LandTableFormat.SA1, labels)
        {
            Author = author, Description = description, Tool = tool, Metadata = meta
        };
        ByteConverter.BigEndian = be;
        return(table);
    }
    if (magic == SA2LVL)
    {
        LandTable table = new LandTable(file, ByteConverter.ToInt32(file, 8), 0, LandTableFormat.SA2, labels)
        {
            Author = author, Description = description, Tool = tool, Metadata = meta
        };
        ByteConverter.BigEndian = be;
        return(table);
    }
    ByteConverter.BigEndian = be;
    throw new FormatException("Not a valid SA1LVL/SA2LVL file.");
}
/// <summary>
/// Writer ctor: creates a new chunk of the given type for serialization
/// (no data is read from a stream).
/// </summary>
/// <param name="type">Type tag of this chunk.</param>
protected Chunk(ChunkTypes type)
{
    this.type = type;
}