/// <summary>
/// Reads record data from stream.
/// </summary>
/// <param name="reader">Reader positioned at start of record data.</param>
/// <param name="length">Length of record data to read.</param>
public virtual void Open(BinaryReader reader, int length)
{
    streamPosition = reader.BaseStream.Position;
    streamLength = length;

    // Zero-length records carry no payload to read
    if (length <= 0)
        return;

    // Light records store more data than their declared length indicates,
    // so scale the amount we actually read
    recordType = SaveTree.PeekRecordType(reader);
    if (recordType == RecordTypes.Light)
        streamLength *= LightDataLengthMultiplier;

    // Buffer the raw bytes, then parse the RecordRoot header from them
    streamData = reader.ReadBytes(streamLength);
    ReadRecordRoot();
}
/// <summary>
/// Packs a record type and layout into one byte: type occupies the low
/// nibble, layout is shifted into the high nibble.
/// </summary>
/// <param name="type">Record type for the low four bits.</param>
/// <param name="layout">Record layout for the high four bits.</param>
/// <returns>Combined type/layout byte.</returns>
public static byte MakeRecordType(RecordTypes type, RecordLayouts layout)
{
    int typeBits = (int)type;
    int layoutBits = ((int)layout) << 4;
    return (byte)(typeBits | layoutBits);
}
/// <summary>
/// Initializes the record with its value text and record type.
/// </summary>
/// <param name="value">Value text for this record.</param>
/// <param name="type">Record type of this record.</param>
public AddSimpleRecordToList(string value, RecordTypes type)
{
    this.type = type;
    this.value = value;
}
/// <summary>
/// Creates a TypeName semantic record wrapping a class entry.
/// </summary>
/// <param name="className">Class entry this record refers to.</param>
public SemanticRecord(ClassEntry className)
{
    recordType = RecordTypes.TypeName;
    this.className = className;
}
/// <summary>
/// Creates a Variable semantic record wrapping a variable.
/// </summary>
/// <param name="variable">Variable this record refers to.</param>
public SemanticRecord(Variable variable)
{
    recordType = RecordTypes.Variable;
    this.variable = variable;
}
/// <summary>
/// Creates a semantic record of an explicit type with an associated value.
/// </summary>
/// <param name="recordType">Type of this record.</param>
/// <param name="value">Value text for this record.</param>
public SemanticRecord(RecordTypes recordType, string value)
{
    this.value = value;
    this.recordType = recordType;
}
/// <summary>
/// Stores the numeric STDF record type code for this record.
/// </summary>
/// <param name="recordTypeCode">Record type whose numeric code identifies this record.</param>
protected STDFRecord(RecordTypes recordTypeCode)
{
    RecordType = (ushort)recordTypeCode;
}
// Reads all records in SaveTree.
// Reader must be positioned at start of first RecordElement.
void ReadRecords(BinaryReader reader)
{
    RecordDictionary.Clear();
    RootRecord = new SaveTreeBaseRecord();
    while (reader.BaseStream.Position < reader.BaseStream.Length)
    {
        // Read record length and skip empty records as they have no data
        int length = reader.ReadInt32();
        if (length <= 0)
        {
            continue;
        }

        // Handle potential stream overflow (e.g. corrupt save, something went wrong)
        // NOTE(review): '>=' also rejects a record whose data ends exactly at the
        // end of the stream; confirm the format always has trailing data before
        // relaxing this to '>'.
        if (reader.BaseStream.Position + length >= reader.BaseStream.Length)
        {
            break;
        }

        // Peek record type from RecordRoot so we can instantiate record class based on type
        RecordTypes type = PeekRecordType(reader);

        // Instantiate a specialised record class where one exists for this type;
        // anything unrecognised is kept as a generic SaveTreeBaseRecord.
        SaveTreeBaseRecord record;
        switch (type)
        {
            case RecordTypes.Item:
                record = new ItemRecord(reader, length);
                break;
            case RecordTypes.Character:
                record = new CharacterRecord(reader, length);
                break;
            case RecordTypes.Spell:
                record = new SpellRecord(reader, length);
                break;
            case RecordTypes.GuildMembership:
            case RecordTypes.OldGuild:
                record = new GuildMembershipRecord(reader, length);
                break;
            case RecordTypes.DiseaseOrPoison:
                record = new DiseaseOrPoisonRecord(reader, length);
                break;
            case RecordTypes.TrappedSoul:
                record = new TrappedSoulRecord(reader, length);
                break;
            case RecordTypes.Container:
                record = new ContainerRecord(reader, length);
                break;
            default:
                record = new SaveTreeBaseRecord(reader, length);
                break;
        }
        AddRecord(record);
    }

    // Resolve parent/child relationships once all records are loaded
    LinkChildren();
}
/// <summary>
/// Finds all instances of a specific record type in tree starting from root record.
/// </summary>
/// <param name="type">Type of record to search for.</param>
/// <param name="root">Root record to start searching from. If null, will start from RecordRoot.</param>
/// <returns>List of records found. May contain 0 records.</returns>
public List<SaveTreeBaseRecord> FindRecords(RecordTypes type, SaveTreeBaseRecord root = null)
{
    var results = new List<SaveTreeBaseRecord>();
    FindRecordsByType(type, root ?? RootRecord, results);
    return results;
}
/// <summary>
/// Initializes bulk combine fields with an explicit list of field input types.
/// Delegates the shared setup to the shorter constructor overload.
/// </summary>
/// <param name="record">Record type the fields belong to.</param>
/// <param name="filter">Filter applied to the records.</param>
/// <param name="resource">Resource the fields are defined on.</param>
/// <param name="fieldNames">Names of the fields to combine.</param>
/// <param name="recordCreator">Creator used to build the records.</param>
/// <param name="userIds">User ids the records are created for.</param>
/// <param name="fieldType">Input types corresponding to the fields.</param>
public BulkCombineFields(RecordTypes record, Filters filter, ResourceId resource,
    List<string> fieldNames, HrbcRecordCreator recordCreator, List<int> userIds,
    List<InputType> fieldType)
    // BUG FIX: the original body did `new BulkCombineFields(...)` and discarded
    // the result, so the shared initialization never ran on *this* instance.
    // Chain to the overload instead.
    : this(record, filter, resource, fieldNames, recordCreator, userIds)
{
    FieldTypes = fieldType;
}
/// <summary>
/// Writes a timestamped warning/info message for the given record type to the console.
/// </summary>
/// <param name="warningofonfomessage">Message text to log.</param>
/// <param name="recordtype">Record type the message relates to.</param>
public void GetLog(string warningofonfomessage, RecordTypes recordtype)
{
    Console.WriteLine($"{DateTime.Now}, {recordtype}: {warningofonfomessage}");
}
/// <summary>
/// Writes a timestamped exception message to the console.
/// </summary>
/// <param name="e">Exception whose message is logged.</param>
/// <param name="recordtype">Record type the exception relates to; defaults to Exception.</param>
public void GetLog(Exception e, RecordTypes recordtype = RecordTypes.Exception)
{
    Console.WriteLine($"{DateTime.Now}, {recordtype}: {e.Message}");
}
/// <summary>
/// Finds first record of type in tree starting from root record.
/// </summary>
/// <param name="type">Type of record to search for.</param>
/// <param name="root">Root record to start searching from. If null, will start from RecordRoot.</param>
/// <returns>Found item or null if not found.</returns>
public SaveTreeBaseRecord FindRecord(RecordTypes type, SaveTreeBaseRecord root = null)
{
    var matches = FindRecords(type, root);
    return matches.Count > 0 ? matches[0] : null;
}
/// <summary>
/// Filters a record list by parent type.
/// </summary>
/// <param name="source">Source list.</param>
/// <param name="parentType">Parent type to filter for.</param>
/// <returns>New list of items with specific parent type.</returns>
public List<SaveTreeBaseRecord> FilterRecordsByParentType(List<SaveTreeBaseRecord> source, RecordTypes parentType)
{
    var filtered = new List<SaveTreeBaseRecord>();
    foreach (var record in source)
    {
        // Orphan records (no parent) can never match
        if (record.Parent != null && record.Parent.RecordType == parentType)
            filtered.Add(record);
    }
    return filtered;
}
/// <summary>
/// Verifies bulk-creating entries succeeds when merging a user field with a
/// default value generated for its input type.
/// </summary>
/// <param name="entry">Entries case under test.</param>
/// <param name="resource">Resource the field belongs to.</param>
/// <param name="recordType">Record type used for the bulk create.</param>
/// <param name="fieldType">Input type of the user field.</param>
public void TestBulkCreateEntriesMergeUserFieldValid(Entries entry, ResourceId resource, RecordTypes recordType, InputType fieldType)
{
    var fieldName = FieldsCreator.Data[$"{resource}-{fieldType}"].Field.Name;
    var fieldValue = DefaultValueFieldTypes[fieldType](RecordsCreator, resource, fieldName, UserIds.Data);

    var fields = new Dictionary<string, string> { [fieldName] = fieldValue };
    PerformBulkCreateTests(entry, resource, fields, RecordsCreator, recordType);
}
/// <summary>
/// Builds a VMS attribute extra block (PK header with FATDEF and metadata
/// fields) describing a file's RMS record format, timestamps, owner and
/// protection for inclusion in an archive entry.
/// </summary>
/// <param name="recordType">RMS record type stored in the FATDEF.</param>
/// <param name="layout">Record layout combined into the FATDEF type byte.</param>
/// <param name="attributes">Record attribute flags.</param>
/// <param name="recordSize">Record size in bytes.</param>
/// <param name="fileSize">File size in bytes; split into 512-byte blocks plus remainder.</param>
/// <param name="bucketSize">RMS bucket size.</param>
/// <param name="maxRecordSize">Maximum record size.</param>
/// <param name="defaultExtend">Default extend quantity.</param>
/// <param name="created">Creation timestamp (tag 17).</param>
/// <param name="modified">Modification timestamp (tag 18).</param>
/// <param name="ownerId">Owner id written under tag 21.</param>
/// <param name="system">System protection bits.</param>
/// <param name="owner">Owner protection bits.</param>
/// <param name="group">Group protection bits.</param>
/// <param name="world">World protection bits.</param>
/// <returns>Serialized header bytes produced by WriteHeader.</returns>
public static byte[] MakeVMSExtraBlock(RecordTypes recordType, RecordLayouts layout, FileAttributes attributes,
    ushort recordSize, uint fileSize, byte bucketSize, ushort maxRecordSize, ushort defaultExtend,
    DateTime created, DateTime modified, uint ownerId, FileProtection system, FileProtection owner,
    FileProtection group, FileProtection world)
{
    var headerResult = new PK_header
    {
        tag = VMSAttributeHeader,
        size = VMSAttributeSize,
        data = new List<PK_field>()
    };

    // File size in 512-byte blocks. NOTE(review): the +1 is applied even when
    // fileSize is an exact multiple of 512 — confirm block counts are meant to
    // be 1-based / rounded up unconditionally.
    var evenFileSize = Math.DivRem(fileSize, 512, out var fileSizeRemainder) + 1;

    FatDef fatDef = new FatDef
    {
        b_rtype = MakeRecordType(recordType, layout),
        b_rattrib = (byte)attributes,
        w_rsize = recordSize,
        // Block numbers stored with their 16-bit halves swapped
        // (word-swapped longword encoding)
        l_hiblk = (uint)(((evenFileSize + 1) << 16) | ((evenFileSize + 1) >> 16)),
        l_efblk = (uint)((evenFileSize << 16) | (evenFileSize >> 16)),
        w_ffbyte = (ushort)fileSizeRemainder,
        b_bktsize = bucketSize,
        // VFC records reserve 2 control bytes; every other type uses 0
        b_vfcsize = (byte)(recordType == RecordTypes.C_VFC ? 2 : 0),
        w_maxrec = maxRecordSize,
        w_defext = defaultExtend,
        w_gbc = 0
    };

    // tag 4: serialized FATDEF record attributes
    headerResult.data.Add(new PK_field { size = 32, tag = 4, value = WriteFATDef(fatDef) });
    // tag 3: zero-filled 4-byte field — purpose not evident here; verify against format spec
    headerResult.data.Add(new PK_field { size = 4, tag = 3, value = BitConverter.GetBytes((int)0) });
    // tags 17/18: creation and modification times in Smithsonian format
    headerResult.data.Add(new PK_field { size = 8, tag = 17, value = ConvertToSmithsonianTime(created) });
    headerResult.data.Add(new PK_field { size = 8, tag = 18, value = ConvertToSmithsonianTime(modified) });
    // tags 19/20: zero-filled 8-byte time slots (presumably unused timestamps — confirm)
    headerResult.data.Add(new PK_field { size = 8, tag = 19, value = BitConverter.GetBytes((long)0) });
    headerResult.data.Add(new PK_field { size = 8, tag = 20, value = BitConverter.GetBytes((long)0) });
    headerResult.data.Add(new PK_field { size = 2, tag = 13, value = new byte[] { 1, 0 } });
    // tag 21: file owner id
    headerResult.data.Add(new PK_field { size = 4, tag = 21, value = BitConverter.GetBytes(ownerId) });
    // tag 22: packed protection mask built from the four protection classes
    headerResult.data.Add(
        new PK_field { size = 2, tag = 22, value = MakeProtection(system, owner, group, world) });
    headerResult.data.Add(new PK_field { size = 2, tag = 23, value = new byte[] { 0, 0 } });
    headerResult.data.Add(new PK_field { size = 1, tag = 29, value = new byte[] { 0 } });

    return (WriteHeader(headerResult));
}
/// <summary>
/// Verifies bulk-creating entries succeeds when merging an application field,
/// resolving the field's input type from its name before generating a default value.
/// </summary>
/// <param name="entry">Entries case under test.</param>
/// <param name="resource">Resource the field belongs to.</param>
/// <param name="recordType">Record type used for the bulk create.</param>
/// <param name="fieldName">Name of the application field.</param>
public void TestBulkCreateEntriesMergeApplicationFieldValid(Entries entry, ResourceId resource, RecordTypes recordType, string fieldName)
{
    var fieldType = BulkCombineFields.GetFieldTypeByFieldName(resource, fieldName);
    var fieldValue = DefaultValueFieldTypes[fieldType](RecordsCreator, resource, fieldName, UserIds.Data);

    var fields = new Dictionary<string, string> { [fieldName] = fieldValue };
    PerformBulkCreateTests(entry, resource, fields, RecordsCreator, recordType);
}
// Depth-first walk from parent, collecting every record whose type matches.
void FindRecordsByType(RecordTypes type, SaveTreeBaseRecord parent, List<SaveTreeBaseRecord> recordList)
{
    if (parent.RecordType == type)
        recordList.Add(parent);

    foreach (SaveTreeBaseRecord child in parent.Children)
        FindRecordsByType(type, child, recordList);
}
/// <summary>
/// Filters a record list by parent type.
/// </summary>
/// <param name="source">Source list.</param>
/// <param name="parentType">Parent type to filter for.</param>
/// <returns>New list of items with specific parent type.</returns>
public List<SaveTreeBaseRecord> FilterRecordsByParentType(List<SaveTreeBaseRecord> source, RecordTypes parentType)
{
    var matches = new List<SaveTreeBaseRecord>();
    for (int i = 0; i < source.Count; i++)
    {
        SaveTreeBaseRecord parent = source[i].Parent;

        // Skip orphans; keep records whose parent has the requested type
        if (parent != null && parent.RecordType == parentType)
            matches.Add(source[i]);
    }
    return matches;
}
/// <summary>
/// Reads record data from stream.
/// </summary>
/// <param name="reader">Reader positioned at start of record data.</param>
/// <param name="length">Length of record data to read.</param>
public virtual void Open(BinaryReader reader, int length)
{
    streamPosition = reader.BaseStream.Position;
    streamLength = length;

    if (length <= 0)
    {
        // Nothing to read for zero-length records
        return;
    }

    // Dungeon information records occupy a multiple of their declared length,
    // so scale the amount we actually read
    recordType = SaveTree.PeekRecordType(reader);
    if (recordType == RecordTypes.DungeonInformation)
    {
        streamLength *= DungeonDataLengthMultiplier;
    }

    // Buffer the raw bytes, then parse the RecordRoot header from them
    streamData = reader.ReadBytes(streamLength);
    ReadRecordRoot();
}