/// <summary>
/// Builds a Halo 1 PC cache view over the given map file. Configures the shared
/// reader for little-endian reads, then constructs the header, tag index,
/// tag table and string table from it.
/// </summary>
/// <param name="Filename">Path of the cache (.map) file.</param>
/// <param name="Build">Build string identifying the map version.</param>
public CacheFile(string Filename, string Build) : base(Filename, Build)
{
    // Set endianness before any of the structures below are parsed.
    Reader.EndianType = EndianFormat.LittleEndian;
    Version = DefinitionSet.Halo1PC;

    Header = new CacheHeader(this);
    IndexHeader = new CacheIndexHeader(this);
    IndexItems = new IndexTable(this);
    Strings = new StringTable(this);
    LocaleTables = new List<LocaleTable>();
}
/// <summary>
/// Builds a Halo 3 cache view over the given map file: header, tag index,
/// tag table, string table, and one locale table per language declared in
/// the build definition node.
/// </summary>
/// <param name="Filename">Path of the cache (.map) file.</param>
/// <param name="Build">Build string identifying the map version.</param>
public CacheFile(string Filename, string Build) : base(Filename, Build)
{
    Version = DefinitionSet.Halo3;
    Header = new CacheHeader(this);
    IndexHeader = new CacheIndexHeader(this);
    IndexItems = new IndexTable(this);
    Strings = new StringTable(this);
    LocaleTables = new List<LocaleTable>();
    try
    {
        // Hoisted out of the loop: the original re-parsed the attribute on
        // every iteration.
        int languageCount = int.Parse(buildNode.Attributes["languageCount"].Value);
        for (int i = 0; i < languageCount; i++)
        {
            LocaleTables.Add(new LocaleTable(this, (GameLanguage)i));
        }
    }
    catch
    {
        // Best-effort: locale data is optional. On any failure (missing
        // attribute, unparsable count, bad table data) drop partial results
        // rather than expose a half-built list.
        LocaleTables.Clear();
    }
}
/// <summary>
/// Search results for given index key.
/// </summary>
/// <param name="index">Index Name</param>
/// <param name="key">Index key</param>
/// <param name="manager">Index manager; the instance manager is used when null</param>
/// <returns>IEnumerable search results, one dictionary per matching row</returns>
private IEnumerable<Dictionary<string, object>> GetIndexedEnumerable(string index, object key, IndexManager<object, Segment> manager = null)
{
    // Fall back to the instance-level manager when none is supplied.
    IndexManager<object, Segment> activeManager = manager ?? this.indexManager;
    IndexTable<object, Segment> lookupTable = activeManager.GetMap(index, false);
    List<Segment> matchingSegments = lookupTable.GetMap(key, false);

    this.csvReader.Reset();
    foreach (Segment segment in matchingSegments)
    {
        this.csvReader.GoToSegment(segment);
        // One row is read per segment; skip segments that yield nothing.
        List<Dictionary<string, object>> rows = this.csvReader.GetNextRows(1);
        if (rows == null || rows.Count == 0)
        {
            continue;
        }
        foreach (Dictionary<string, object> row in rows)
        {
            yield return row;
        }
    }
}
/// <summary>
/// Constructs a Gen2 cache and, when the file is one of the well-known shared
/// maps, registers this instance in the corresponding static slot (unless that
/// slot already points at this exact file).
/// </summary>
public CacheFileGen2(GameCache cacheContext, FileInfo file, CacheVersion version, bool memory)
    : base(cacheContext, file, version, memory)
{
    if (file.Name == "mainmenu.map" && MainMenuCache?.File.FullName != file.FullName)
    {
        MainMenuCache = this;
    }
    else if (file.Name == "shared.map" && SharedCache?.File.FullName != file.FullName)
    {
        SharedCache = this;
    }
    // BUG FIX: the original guarded this branch on SharedCache (copy-paste),
    // so the single-player slot was compared against the wrong cache's file.
    else if (file.Name == "single_player_shared.map" && SinglePlayerSharedCache?.File.FullName != file.FullName)
    {
        SinglePlayerSharedCache = this;
    }

    IndexHeader = new CacheIndexHeader(this);
    IndexItems = new IndexTable(this);
    Strings = new StringTable(this);
    LocaleTables = new List<LocaleTable>();
}
/// <summary>
/// Builds the per-attribute index stores. For each attribute a shared store
/// from commonRbStores (keyed "type:attribute") is reused when present,
/// otherwise a fresh HashStore is created. A tag index store is always
/// registered, shared when available.
/// </summary>
/// <param name="attribList">Names of attributes to index; may be null or empty.</param>
public override void Initialize(ArrayList attribList)
{
    IIndexStore store = null;
    if (attribList != null && attribList.Count > 0)
    {
        foreach (object attrib in attribList)
        {
            string attribName = attrib.ToString();
            // Hoisted: the composite key was previously built twice
            // (once for ContainsKey, once for the indexer).
            string commonKey = _type + ":" + attribName;
            if (commonRbStores != null && commonRbStores.ContainsKey(commonKey))
            {
                HashStore commonStore = (HashStore)commonRbStores[commonKey];
                IndexTable.Add(attribName, commonStore);
            }
            else
            {
                store = new HashStore();
                IndexTable.Add(attribName, store);
            }
        }
        // Register the tag index alongside the attribute stores.
        if (commonRbStores != null && commonRbStores.ContainsKey(TAG_INDEX_KEY))
        {
            store = (HashStore)commonRbStores[TAG_INDEX_KEY];
            IndexTable.Add(TAG_INDEX_KEY, store);
        }
        else
        {
            store = new HashStore();
            IndexTable.Add(TAG_INDEX_KEY, store);
        }
    }
    // No attributes were indexed: still pick up a shared tag index if one exists.
    if (!IndexTable.ContainsKey(TAG_INDEX_KEY) && commonRbStores != null && commonRbStores.ContainsKey(TAG_INDEX_KEY))
    {
        store = (HashStore)commonRbStores[TAG_INDEX_KEY];
        IndexTable.Add(TAG_INDEX_KEY, store);
    }
}
/// <summary>
/// Removes an item's attribute values from the index stores under the lock.
/// <paramref name="value"/> is expected to be a Hashtable of
/// attribute-name → attribute-value pairs for the item identified by
/// <paramref name="key"/>. Stores that become empty are either removed
/// (tag / named-tag keys) or reset to a fresh HashStore.
/// </summary>
/// <param name="key">Identifier of the item being de-indexed.</param>
/// <param name="value">Hashtable of the item's indexed attribute values.</param>
public override void RemoveFromIndex(object key, object value)
{
    lock (_mutex)
    {
        Hashtable attributeValues = value as Hashtable;
        IDictionaryEnumerator valuesDic = attributeValues.GetEnumerator();
        while (valuesDic.MoveNext())
        {
            string indexKey = (string)valuesDic.Key;
            // NOTE: the second Contains deliberately reassigns indexKey to its
            // named-tag form; the rest of the loop then uses that rewritten key.
            if (IndexTable.Contains(indexKey) || IndexTable.Contains(indexKey = ConvertToNamedTagKey(indexKey)))
            {
                IIndexStore store = IndexTable[indexKey] as IIndexStore;
                object val = valuesDic.Value;
                if (val != null)
                {
                    store.Remove(val, key);
                }
                else
                {
                    // Null attribute values are indexed under the literal "null" bucket.
                    store.Remove("null", key);
                }
                if (store.Count == 0)
                {
                    // Tag and named-tag stores are dropped entirely when empty;
                    // ordinary attribute stores are kept but replaced with a
                    // fresh (empty) HashStore.
                    if (indexKey == TAG_INDEX_KEY || IsNamedTagKey(indexKey))
                    {
                        IndexTable.Remove(indexKey);
                    }
                    else
                    {
                        IndexTable[indexKey] = new HashStore();
                    }
                }
            }
        }
    }
}
// UseIndex() translates an ItemVar (for now, as long as it's a number) into another
// number that will be saved to the item at the end of the query.
void UseIndex()
{
    foreach (XElement index in IndexTable.Elements())
    {
        // Only "number"-typed index tables are handled for now.
        if ((string)index.Attribute("type") != "number")
            continue;

        // The variable named by tableVar selects which row of the table applies.
        int indexNum = (int)ItemVars.Element(index.Attribute("tableVar").Value);

        // IMPROVED: the original evaluated the query twice (Count() > 0, then
        // FirstOrDefault); a single FirstOrDefault with a null check suffices.
        XElement selected = index.Elements()
            .FirstOrDefault(indexEle => indexNum == (int)indexEle.Attribute("index"));
        if (selected == null)
            continue;

        // The row's text names the target ItemVar; "amt" is added to it.
        string itemVar = (string)selected;
        int amt = (int)selected.Attribute("amt");
        int itemNum = (int)ItemVars.Element(itemVar);
        ItemVars.Element(itemVar).Value = (itemNum + amt).ToString();
    }
}
/// <summary>
/// Read data and populate the fields: file header, the two data headers, the
/// index table, the name table, and the combined FileEntries array, which is
/// finally sorted by entry name. Sets isFullyRead on success.
/// </summary>
public void Read()
{
    using (Stream stream = new FileStream(Path, FileMode.Open, FileAccess.Read, FileShare.Read))
    {
        using (BinaryReader reader = new BinaryReader(stream))
        {
            // Header block
            Header = new HeaderBlock
            {
                Magic = reader.ReadChars(8),
                Unknown1 = reader.ReadByte(),
                FileVersionIdentifier = reader.ReadInt32(),
                OffsetToDataHeader = reader.ReadUInt64()
            };
            // skip to DataHeader1
            // NOTE(review): the (int) cast narrows a 64-bit offset; this breaks
            // for offsets beyond int.MaxValue — confirm file sizes stay small.
            stream.Position = (int)Header.OffsetToDataHeader;
            // Data Header 1
            DataHeader1 = new DataHeader1Block
            {
                NumOfEntries = reader.ReadInt32(),
                Unknown1 = Helpers.ReadInt32s(reader, 4),
                Unknown2 = reader.ReadInt64(),
                MaxFilesForThisIndex = reader.ReadInt32(),
                Unknown3 = reader.ReadInt32(),
                OffsetToData = reader.ReadInt64()
            };
            // skip to DataHeader2 (same int-narrowing caveat as above)
            stream.Position = (int)DataHeader1.OffsetToData;
            // Data Header 2
            DataHeader2 = new DataHeader2Block
            {
                IndexCount = reader.ReadInt32(),
                Unknown1 = reader.ReadInt32(),
                OffsetToIndexTable = reader.ReadInt64(),
                OffsetToNextDataSection = reader.ReadInt64(),
                IndexStart = reader.ReadInt32(),
                IndexEnd = reader.ReadInt32(),
                OffsetToNameTable = reader.ReadInt64(),
                Unknown2 = reader.ReadInt64()
            };
            // File Entries — one per index; filled in as the two tables are read.
            FileEntries = new FileEntry[DataHeader2.IndexCount];
            // Index Table
            Indices = new IndexTable[DataHeader2.IndexCount];
            for (int i = 0; i < DataHeader2.IndexCount; i++)
            {
                Indices[i] = new IndexTable
                {
                    OffsetToRawDataTable = reader.ReadInt64(),
                    FileDataID = reader.ReadInt64(),
                    RawDataSize = reader.ReadInt32()
                };
                FileEntries[i].IndexTable = Indices[i];
            }
            // skip to Name Table
            stream.Position = DataHeader2.OffsetToNameTable;
            // Name Table
            Names = new NameTable[DataHeader2.IndexCount];
            for (int i = 0; i < DataHeader2.IndexCount; i++)
            {
                Names[i] = new NameTable
                {
                    RawDataSize = reader.ReadInt32(),
                    FileDataID = reader.ReadInt64(),
                    Unknown1 = reader.ReadInt32(),
                    ResourceIdentifier = reader.ReadUInt32(),
                    Unknown2 = Helpers.ReadInt32s(reader, 2),
                    NextFileCount = reader.ReadInt32(),
                    PreviousFileCount = reader.ReadInt32(),
                    Unknown3 = reader.ReadInt32(),
                    Timestamp = reader.ReadInt32(),
                    // Fixed 128-character name field.
                    Name = new string(reader.ReadChars(128)),
                    Unknown4 = Helpers.ReadInt32s(reader, 5)
                };
                // remove non-ASCII characters from the name (strips padding too)
                Names[i].Name = Regex.Replace(Names[i].Name, @"[^\u0020-\u007E]", string.Empty);
                FileEntries[i].NameTable = Names[i];
            }
            // alphabetically sort NameTables (culture-sensitive string CompareTo)
            Array.Sort(FileEntries, new Comparison<FileEntry>((x, y) => { return (x.NameTable.Name.CompareTo(y.NameTable.Name)); }));
            isFullyRead = true;
        }
    }
}
/// <summary>
/// Converts an Assimp node's triangle meshes into a single SubMesh: vertices,
/// normals, tangents, two UV channels, vertex colors and bone weights are
/// merged into shared arrays, and one IndexTable (with material and bounding
/// volumes) is produced per source mesh. Returns null when the node carries no
/// usable triangle mesh. Bones and materials encountered for the first time
/// are appended to the shared lists via the provided maps.
/// </summary>
private static SubMesh ConvertSubMeshFromAiNode(Ai.Node aiNode, Ai.Scene aiScene, Matrix4x4 parentTransformation, Dictionary<string, int> boneMap, List<Bone> bones, Dictionary<string, int> materialMap, List<Material> materials, string texturesDirectory, TextureSet textureSet)
{
    if (!aiNode.HasMeshes)
    {
        return (null);
    }

    // Select meshes that have triangles
    var aiMeshes = aiNode.MeshIndices.Select(x => aiScene.Meshes[x]).Where(x => x.PrimitiveType == Ai.PrimitiveType.Triangle && x.Faces.Any(y => y.IndexCount == 3)).ToList();
    if (aiMeshes.Count == 0)
    {
        return (null);
    }

    // Node transform composed with the parent; applied to every position/normal.
    var transformation = parentTransformation * aiNode.Transform.ToNumericsTransposed();
    int vertexCount = aiMeshes.Sum(x => x.VertexCount);
    var subMesh = new SubMesh
    {
        Name = aiNode.Name,
        Vertices = new Vector3[vertexCount],
    };
    // Running base index of the current source mesh inside the merged arrays.
    int vertexOffset = 0;
    foreach (var aiMesh in aiMeshes)
    {
        // Positions (always present).
        for (int i = 0; i < aiMesh.Vertices.Count; i++)
        {
            subMesh.Vertices[vertexOffset + i] = Vector3.Transform(aiMesh.Vertices[i].ToNumerics(), transformation);
        }
        // Normals — array lazily allocated on first mesh that has them.
        if (aiMesh.HasNormals)
        {
            if (subMesh.Normals == null)
            {
                subMesh.Normals = new Vector3[vertexCount];
            }
            for (int i = 0; i < aiMesh.Normals.Count; i++)
            {
                subMesh.Normals[vertexOffset + i] = Vector3.Normalize(Vector3.TransformNormal(aiMesh.Normals[i].ToNumerics(), transformation));
            }
        }
        // Tangents — w stores the handedness sign derived from the bitangent.
        if (aiMesh.HasTangentBasis)
        {
            if (subMesh.Tangents == null)
            {
                subMesh.Tangents = new Vector4[vertexCount];
            }
            for (int i = 0; i < aiMesh.Tangents.Count; i++)
            {
                Vector3 tangent = Vector3.Normalize(Vector3.TransformNormal(aiMesh.Tangents[i].ToNumerics(), transformation));
                Vector3 bitangent = Vector3.Normalize(Vector3.TransformNormal(aiMesh.BiTangents[i].ToNumerics(), transformation));
                int direction = Math.Sign(Vector3.Dot(bitangent, Vector3.Normalize(Vector3.Cross(subMesh.Normals[vertexOffset + i], tangent))));
                subMesh.Tangents[vertexOffset + i] = new Vector4(tangent, direction);
            }
        }
        // UV channel 1 — V flipped (1 - y).
        if (aiMesh.HasTextureCoords(0))
        {
            if (subMesh.UVChannel1 == null)
            {
                subMesh.UVChannel1 = new Vector2[vertexCount];
            }
            for (int i = 0; i < aiMesh.TextureCoordinateChannels[0].Count; i++)
            {
                subMesh.UVChannel1[vertexOffset + i] = new Vector2(aiMesh.TextureCoordinateChannels[0][i].X, 1f - aiMesh.TextureCoordinateChannels[0][i].Y);
            }
        }
        // UV channel 2 — same V flip.
        if (aiMesh.HasTextureCoords(1))
        {
            if (subMesh.UVChannel2 == null)
            {
                subMesh.UVChannel2 = new Vector2[vertexCount];
            }
            for (int i = 0; i < aiMesh.TextureCoordinateChannels[1].Count; i++)
            {
                subMesh.UVChannel2[vertexOffset + i] = new Vector2(aiMesh.TextureCoordinateChannels[1][i].X, 1f - aiMesh.TextureCoordinateChannels[1][i].Y);
            }
        }
        // Vertex colors — array defaults to white for meshes without colors.
        if (aiMesh.HasVertexColors(0))
        {
            if (subMesh.Colors == null)
            {
                subMesh.Colors = new Color[vertexCount];
                for (int i = 0; i < subMesh.Colors.Length; i++)
                {
                    subMesh.Colors[i] = Color.White;
                }
            }
            for (int i = 0; i < aiMesh.VertexColorChannels[0].Count; i++)
            {
                subMesh.Colors[vertexOffset + i] = new Color(aiMesh.VertexColorChannels[0][i].R, aiMesh.VertexColorChannels[0][i].G, aiMesh.VertexColorChannels[0][i].B, aiMesh.VertexColorChannels[0][i].A);
            }
        }
        var indexTable = new IndexTable();
        // Skinning — weights default to empty; new bones are registered globally.
        if (aiMesh.HasBones)
        {
            if (subMesh.BoneWeights == null)
            {
                subMesh.BoneWeights = new BoneWeight[vertexCount];
                for (int i = 0; i < subMesh.BoneWeights.Length; i++)
                {
                    subMesh.BoneWeights[i] = BoneWeight.Empty;
                }
            }
            indexTable.BoneIndices = new ushort[aiMesh.Bones.Count];
            for (int i = 0; i < aiMesh.Bones.Count; i++)
            {
                var aiBone = aiMesh.Bones[i];
                // First encounter of this bone name: append to the shared list.
                if (!boneMap.TryGetValue(aiBone.Name, out int boneIndex))
                {
                    boneIndex = bones.Count;
                    boneMap[aiBone.Name] = boneIndex;
                    bones.Add(ConvertBoneFromAiBone(aiBone, aiScene, boneIndex));
                }
                indexTable.BoneIndices[i] = (ushort)boneIndex;
                // Weights reference the LOCAL bone slot i, not the global index.
                foreach (var aiWeight in aiBone.VertexWeights)
                {
                    subMesh.BoneWeights[vertexOffset + aiWeight.VertexID].AddWeight(i, aiWeight.Weight);
                }
            }
        }
        // Flatten triangle faces, rebased by vertexOffset; stripify when possible.
        indexTable.Indices = aiMesh.Faces.Where(x => x.IndexCount == 3).SelectMany(x => x.Indices).Select(x => (ushort)(vertexOffset + x)).ToArray();
        ushort[] triangleStrip = Stripifier.Stripify(indexTable.Indices);
        if (triangleStrip != null)
        {
            indexTable.PrimitiveType = PrimitiveType.TriangleStrip;
            indexTable.Indices = triangleStrip;
        }
        // Material — registered globally on first encounter, like bones.
        var aiMaterial = aiScene.Materials[aiMesh.MaterialIndex];
        if (!materialMap.TryGetValue(aiMaterial.Name, out int materialIndex))
        {
            materialIndex = materials.Count;
            materialMap[aiMaterial.Name] = materialIndex;
            materials.Add(ConvertMaterialFromAiMaterial(aiMaterial, texturesDirectory, textureSet));
        }
        indexTable.MaterialIndex = materialIndex;
        // Bounding volumes computed over this mesh's slice of the merged vertices.
        var axisAlignedBoundingBox = new AxisAlignedBoundingBox(subMesh.Vertices.Skip(vertexOffset).Take(aiMesh.Vertices.Count));
        indexTable.BoundingSphere = axisAlignedBoundingBox.ToBoundingSphere();
        indexTable.BoundingBox = axisAlignedBoundingBox.ToBoundingBox();
        subMesh.IndexTables.Add(indexTable);
        vertexOffset += aiMesh.VertexCount;
    }
    // Overall bounding sphere over all merged vertices.
    subMesh.BoundingSphere = new AxisAlignedBoundingBox(subMesh.Vertices).ToBoundingSphere();
    return (subMesh);
}
/// <summary>
/// Returns the StringValue of the first index table named <paramref name="key"/>,
/// or an empty string when no table has that name.
/// </summary>
public string GetStringValue(string key)
{
    foreach (IndexTable candidate in IndexTables)
    {
        if (candidate.Name == key)
        {
            // Note: may be null if the matching table's StringValue is null,
            // matching the original FirstOrDefault-based behavior.
            return candidate.StringValue;
        }
    }
    return string.Empty;
}
/// <summary>
/// Saves the archive to <paramref name="destination"/>: linked archives first,
/// then the 2-byte header, raw entry data, entry definitions, properties and
/// finally the index table.
/// </summary>
/// <param name="destination">Target file path.</param>
/// <param name="overwrite">When false, an existing target file is an error.</param>
/// <exception cref="InvalidOperationException">The file exists and overwrite is false.</exception>
public void SaveAs(string destination, bool overwrite = true)
{
    // save the links before
    while (m_linksToSave.Count > 0)
    {
        D2pFile link = m_linksToSave.Dequeue();
        // theorically the path is defined
        link.Save();
    }

    if (File.Exists(destination) && !overwrite)
    {
        throw new InvalidOperationException(
            "Cannot perform SaveAs : file already exist, notify overwrite parameter to true");
    }

    // BUG FIX: the original used File.OpenWrite for an existing file, which does
    // not truncate — a new archive shorter than the old one kept stale trailing
    // bytes. FileMode.Create both creates and truncates.
    Stream stream = new FileStream(destination, FileMode.Create, FileAccess.Write);
    using (var writer = new BigEndianWriter(stream))
    {
        // header
        writer.WriteByte(2);
        writer.WriteByte(1);
        D2pEntry[] entries = GetEntriesOfInstanceOnly();
        // avoid the header
        int offset = 2;
        foreach (D2pEntry entry in entries)
        {
            byte[] data = ReadFile(entry);
            // Entry index = start position of its data, relative to the header.
            entry.Index = (int)writer.Position - offset;
            writer.WriteBytes(data);
        }
        var entriesDefOffset = (int)writer.Position;
        foreach (D2pEntry entry in entries)
        {
            entry.WriteEntryDefinition(writer);
        }
        var propertiesOffset = (int)writer.Position;
        foreach (D2pProperty property in m_properties)
        {
            property.WriteProperty(writer);
        }
        // Index table goes last and records where everything else landed.
        IndexTable.OffsetBase = offset;
        IndexTable.EntriesCount = entries.Length;
        IndexTable.EntriesDefinitionOffset = entriesDefOffset;
        IndexTable.PropertiesCount = m_properties.Count;
        IndexTable.PropertiesOffset = propertiesOffset;
        IndexTable.Size = IndexTable.EntriesDefinitionOffset - IndexTable.OffsetBase;
        IndexTable.WriteTable(writer);
    }
}
/// <summary>
/// Creates a tag wrapping the given <see cref="IndexTable"/>.
/// </summary>
/// <param name="table">The table exposed through this tag's Table property.</param>
public IndexTableTag(IndexTable table)
{
    Table = table;
}
/// <summary>
/// Runs a query over every object of type T in the node's typed stream:
/// filters by index data and/or the loaded object (per expression.Filter),
/// projects survivors into <paramref name="result"/>, and optionally sorts.
/// </summary>
/// <param name="result">Accumulator the selected values are appended to; also returned.</param>
/// <param name="table">Node whose typed stream and index table are queried.</param>
/// <param name="expression">Filter / selector / sorter for the query.</param>
/// <exception cref="TypedStreamNotFoundException">The node has no typed stream for T.</exception>
static List<Selectable> FindInternal<Selectable, T>(List<Selectable> result, Node<object> table, IQueryExpression<Selectable, T> expression)
{
    TypedStream<T> typedStream = table.Open<T>(OpenMode.Read);
    if (typedStream == null)
    {
        throw new TypedStreamNotFoundException(
            string.Format("Typed stream of type {0} not found in '{1}'", typeof(T), table.Path));
    }

    // We first obtain index table.
    IndexTable index2 = table.IndexTable;
    StreamIndexTable index = index2 != null ? index2[typeof(T)] : null;
    QueryFilter filter = expression.Filter;

    // We make sure it is disposed correctly.
    using (typedStream)
    {
        uint[] locations = typedStream.ObjectLocations;
        for (int i = 0; i < locations.Length; i++)
        {
            // We have object's location.
            uint primary = locations[i];
            T processingObject = default(T);

            // 1) We now filter all objects, first by index.
            if ((filter & QueryFilter.Index) != 0)
            {
                Dictionary<string, object> indexData =
                    StreamIndexTable.IndexOfObject<T>(index, typedStream, primary, out processingObject);
                // We now process through filter.
                if (!expression.IsSatisfied(primary, indexData))
                {
                    continue;
                }
            }

            // 2) Now we try the alternative filtering, by object.
            if ((filter & QueryFilter.Object) != 0)
            {
                // Force object loading, if not already loaded by indexing.
                // BUG FIX: the original condition was inverted — it re-read the
                // object only when it was ALREADY non-null, and never loaded it
                // when it was still default, contradicting the stated intent.
                if (object.ReferenceEquals(processingObject, null))
                {
                    processingObject = typedStream.Read(primary);
                }
                // We now process it.
                if (!expression.IsSatisfied(processingObject))
                {
                    continue;
                }
            }

            // 3) We now select it.
            result.Add(expression.Select(processingObject));
        }
    }

    // 4) After all objects are loaded, we sort them if requested.
    if ((filter & QueryFilter.Sort) != 0)
    {
        expression.Sort(result);
    }
    return result;
}