/// <summary>
/// Looks up the display string for the given id in the string table.
/// </summary>
/// <param name="id">String-table key.</param>
/// <returns>The mapped string, or the sentinel "T2$^@A" when the id is unknown.</returns>
public string GetString(int id)
{
    // Single lookup via TryGetValue instead of ContainsKey followed by the
    // indexer, which performed the same hash lookup twice.
    if (StringTable.TryGetValue(id, out string value))
    {
        return value;
    }

    // unpossible value — sentinel for ids that should never be missing.
    return "T2$^@A";
}
/// <summary>
/// Validates the key typed into textBoxKey and, when valid and not already
/// present in stringTable, stores it in Key and closes the dialog with success.
/// </summary>
/// <param name="sender">Event source (unused).</param>
/// <param name="e">Routed event data (unused).</param>
private void AddButtn_Click(object sender, RoutedEventArgs e)
{
    // BUG FIX: the pattern must be anchored. Without ^...$, Regex.IsMatch
    // succeeds for any input that merely *contains* one legal character
    // (e.g. "bad key!" passed validation because "bad" matched).
    if (!Regex.IsMatch(textBoxKey.Text, "^[a-zA-Z0-9_]+$"))
    {
        MessageBox.Show("Key must assemble alphabet, '_', numbers.");
        return;
    }

    // Reject duplicate keys.
    if (stringTable.ContainsKey(textBoxKey.Text))
    {
        MessageBox.Show("Key already added.");
        return;
    }

    Key = textBoxKey.Text;
    DialogResult = true; // closes the dialog, signalling success to the caller
}
/// <summary>
/// Restores this object's statistics from a StringTable-encoded string.
/// Keys absent from the input leave the corresponding member untouched.
/// </summary>
/// <param name="valueString">StringTable-formatted key/value text.</param>
public void ConvertFromString(string valueString)
{
    StringTable parsed = StringTable.Parse(valueString);

    if (parsed.ContainsKey("UserGroupIDs"))
    {
        UserGroupIDs = StringUtil.Split2 <Guid>(parsed["UserGroupIDs"]);
    }
    if (parsed.ContainsKey("TotalPoint"))
    {
        // TryParse zeroes the field when the stored value is malformed.
        int.TryParse(parsed["TotalPoint"], out totalPoint);
    }
    if (parsed.ContainsKey("Points"))
    {
        Points = StringUtil.Split <int>(parsed["Points"]);
    }
    if (parsed.ContainsKey("TotalPosts"))
    {
        int.TryParse(parsed["TotalPosts"], out totalPosts);
    }
    if (parsed.ContainsKey("OnlineTime"))
    {
        int.TryParse(parsed["OnlineTime"], out onlineTime);
    }
    if (parsed.ContainsKey("OtherMissionIDs"))
    {
        OtherMissionIDs = StringUtil.Split2 <int>(parsed["OtherMissionIDs"]);
    }
    if (parsed.ContainsKey("MaxApplyCount"))
    {
        int.TryParse(parsed["MaxApplyCount"], out maxApplyCount);
    }
}
// Schema-guessing parser for legacy WDB5/WDB6 client database files.
//
// NOTE: the first statement delegates to ParseWDC1 and returns immediately, so
// everything after the "return;" is intentionally dead code — the CS0162 pragma
// below acknowledges this. The dead body is kept as a reference implementation
// for the pre-WDC1 formats.
//
// Dead-path overview (for maintainers): sections are located back-to-front from
// EOF (common-data table, copy table, index table, WCH7 table, string block).
// The string table is only read when there is no offset table. Column types are
// then inferred: byte counts map directly to BYTE/USHORT/INT, the id column is
// forced to INT, WDB6 common-data columns take their declared type, and
// remaining UNKNOWN columns are resolved by sampling 4 bytes per record and
// eliminating candidates — STRING is dropped on control/replacement chars (or an
// id missing from the string table), FLOAT on values outside ~[1e-6, 1e5],
// UINT on any negative sample. All-zero samples are skipped since they could be
// anything. Finally a Table of Fields named by record offset is emitted.
private void Parse(MemoryStream stream, string file) { ParseWDC1(stream, file); return; #pragma warning disable CS0162 // Unreachable code detected stream.Position = 0; using (var dbReader = new BinaryReader(stream, Encoding.UTF8)) { DBHeader header = ExtractHeader(dbReader); if (header == null) { return; } long pos = dbReader.BaseStream.Position; int CopyTableSize = header.CopyTableSize; //Only WDB5 has a copy table uint CommonDataTableSize = header.CommonDataTableSize; //StringTable - only if applicable long copyTablePos = dbReader.BaseStream.Length - CommonDataTableSize - CopyTableSize; long indexTablePos = copyTablePos - (header.HasIndexTable ? header.RecordCount * 4 : 0); long wch7TablePos = indexTablePos - (header.UnknownWCH7 * 4); long stringTableStart = wch7TablePos - header.StringBlockSize; Dictionary <int, string> StringTable = new Dictionary <int, string>(); if (!header.HasOffsetTable) { dbReader.Scrub(stringTableStart); StringTable = new StringTable().Read(dbReader, stringTableStart, stringTableStart + header.StringBlockSize); dbReader.Scrub(pos); } Dictionary <int, FieldType> FieldTypes = new Dictionary <int, FieldType>() { { 4, FieldType.UNKNOWN }, { 3, FieldType.INT }, { 2, FieldType.USHORT }, { 1, FieldType.BYTE }, }; //Read data List <byte[]> copytabledata = new List <byte[]>(); if (header.IsTypeOf <WDB6>()) { copytabledata = (header as WDB6).ReadOffsetData(dbReader, pos).Values.ToList(); } else { copytabledata = (header as WDB5).ReadOffsetData(dbReader, pos).Values.ToList(); } //String table bool stringtableused = StringTable.Values.Any(x => !string.IsNullOrWhiteSpace(x)) && !header.HasOffsetTable; //Calculate known field types List <FieldInfo> fields = new List <FieldInfo>(); for (int i = 0; i < header.FieldStructure.Count; i++) { int bytecount = header.FieldStructure[i].ByteCount; FieldInfo fi = new FieldInfo(); fi.ArraySize = GetArraySize(ref header, i); if (i == header.IdIndex) { fi.Type = FieldType.INT; } else { fi.Type = 
FieldTypes[bytecount]; } //WDB6 Common Data check if (header.FieldStructure[i].CommonDataColumn) { switch (header.FieldStructure[i].CommonDataType) { case 0: fi.Type = FieldType.STRING; break; case 1: fi.Type = FieldType.USHORT; break; case 2: fi.Type = FieldType.BYTE; break; case 3: fi.Type = FieldType.FLOAT; break; case 4: fi.Type = FieldType.INT; break; } } fields.Add(fi); } //Attempt to figure out unknown types for (int i = 0; i < fields.Count; i++) { if (fields[i].Type != FieldType.UNKNOWN) { continue; } List <FieldType> options = new List <FieldType>() { FieldType.INT, FieldType.UINT, FieldType.FLOAT, FieldType.STRING }; if (!stringtableused) { options.Remove(FieldType.STRING); //Stringtable not used } List <int> intvals = new List <int>(); List <string> stringvals = new List <string>(); List <float> floatvals = new List <float>(); for (int d = 0; d < copytabledata.Count; d++) { byte[] cdata = copytabledata[d]; int start = header.FieldStructure[i].Offset; if (header.HasOffsetTable) { start = 0; for (int x = 0; x < i; x++) { if (fields[x].Type != FieldType.STRING) { int bytecount = header.FieldStructure[x].ByteCount; start += bytecount * fields[x].ArraySize; } else { start += cdata.Skip(start).TakeWhile(b => b != 0).Count() + 1; } } } byte[] data = cdata.Skip(start).Take(4).ToArray(); if (!header.HasOffsetTable && data.All(x => x == 0)) { continue; //Ignore 0 byte columns as they could be anything } //Get int value int intval = BitConverter.ToInt32(data, 0); intvals.Add(intval); //String check if (options.Contains(FieldType.STRING)) { if (header.HasOffsetTable) { //Check for control and nonunicode chars string stringval = Encoding.UTF8.GetString(cdata.Skip(start).TakeWhile(x => x != 0).ToArray()); if (stringval.Length >= 1 && stringval.Any(x => char.IsControl(x) || x == 0xFFFD)) { options.Remove(FieldType.STRING); } else { stringvals.Add(stringval); } } else { //Check it is in the stringtable and more than -1 if (intval < 0 || !StringTable.ContainsKey(intval)) 
{ options.Remove(FieldType.STRING); } } } //Float check if (options.Contains(FieldType.FLOAT)) { //Basic float checks float single = BitConverter.ToSingle(data, 0); if (!float.IsInfinity(single) && !float.IsNaN(single) && (single >= 9.99999997475243E-07 && single <= 100000.0)) { floatvals.Add(single); } else if (single != 0) //Ignore 0s { options.Remove(FieldType.FLOAT); } } //UInt check if (options.Contains(FieldType.UINT) && intval < 0) //If less than 0 must be signed { options.Remove(FieldType.UINT); } } var uniquestr = new HashSet <string>(stringvals); var uniqueint = new HashSet <int>(intvals); var uniquefloat = new HashSet <float>(floatvals); if (uniqueint.Count == 1 && uniqueint.First() == 0) //All 0s { fields[i].Type = FieldType.INT; } else if (!header.HasOffsetTable && options.Contains(FieldType.STRING)) //Int if only 1 Int else String { fields[i].Type = (uniqueint.Count == 1 ? FieldType.INT : FieldType.STRING); } else if (header.HasOffsetTable && options.Contains(FieldType.STRING) && uniquestr.Count > 1) //More than 1 string { fields[i].Type = FieldType.STRING; } else if (header.HasOffsetTable && options.Contains(FieldType.STRING) && uniquefloat.Count <= 1) //1 or less float and string { fields[i].Type = FieldType.STRING; } else if (options.Contains(FieldType.FLOAT) && floatvals.Count > 0) //Floats count more than 1 { fields[i].Type = FieldType.FLOAT; } else if (options.Contains(FieldType.UINT)) //Uint over Int { fields[i].Type = FieldType.UINT; } else { fields[i].Type = FieldType.INT; } } Table table = new Table(); table.Name = Path.GetFileNameWithoutExtension(file); table.Fields = new List <Field>(); string format = $"X{header.FieldStructure.Max(x => x.Offset).ToString().Length}"; //X2, X3 etc for (int i = 0; i < fields.Count; i++) { Field field = new Field(); field.Name = (i == header.IdIndex ? "ID" : $"field{header.FieldStructure[i].Offset.ToString(format)}"); field.IsIndex = (i == header.IdIndex); field.ArraySize = (field.IsIndex ? 
1 : fields[i].ArraySize); field.Type = fields[i].Type.ToString().ToLower(); table.Fields.Add(field); Console.WriteLine($"Name: {field.Name} | Array: {field.ArraySize} | Type: {field.Type}"); } tables.Add(table); Database.ForceGC(); } #pragma warning restore CS0162 // Unreachable code detected }
// Schema-guessing parser for WDC1 files.
//
// Flow: read the header and string block, then decode per-column metadata —
// the id column (or column 0 when an index table exists) is forced to INT;
// uncompressed columns take a byte width of NextPow2((bitCount + 7) / 8);
// compression types beyond Immediate are treated as INT-wide. INT-wide columns
// are marked UNKNOWN and resolved by sampling: per record and array slot, a
// 4-byte little-endian value is read at the running offset. Zero samples are
// discarded (could be anything → default INT); STRING is ruled out if any
// sample is not a string-table key; a column is called FLOAT when >= 85% of its
// positive samples look float-like (per FloatUtil.IsLikelyFloat); otherwise
// STRING if still possible, else UINT for Immediate/cardinality-0 columns, else
// INT. When the header declares relationships the final field is omitted from
// the emitted Table. The "~~" is a no-op double bitwise NOT on an already-int
// expression (likely a JavaScript-ism carried over).
private void ParseWDC1(MemoryStream stream, string file) { stream.Position = 0; Dictionary <int, FieldType> FieldTypes = new Dictionary <int, FieldType>() { { 8, FieldType.ULONG }, { 4, FieldType.INT }, { 2, FieldType.USHORT }, { 1, FieldType.BYTE }, }; using (var dbReader = new BinaryReader(stream, Encoding.UTF8)) { WDC1 header = ExtractHeader(dbReader) as WDC1; if (header == null) { return; } if (header.RecordCount == 0 || header.RecordSize == 0) { return; } long pos = dbReader.BaseStream.Position; Dictionary <int, string> StringTable = new StringTable().Read(dbReader, pos, pos + header.StringBlockSize); bool stringtableused = StringTable.Values.Any(x => !string.IsNullOrWhiteSpace(x)) && !header.HasOffsetTable; List <FieldInfo> fields = new List <FieldInfo>(); var copyTable = header.ReadOffsetData(dbReader, dbReader.BaseStream.Position); for (int f = 0; f < header.ColumnMeta.Count; f++) { FieldType byteType; if (f == header.IdIndex || (f == 0 && header.HasIndexTable)) { fields.Add(new FieldInfo() { ArraySize = 1, Type = FieldType.INT }); continue; } if (header.ColumnMeta[f].CompressionType == CompressionType.None) { int bitSize = header.FieldStructure[f].BitCount; byteType = FieldTypes[NextPow2(~~(bitSize + 7) / 8)]; } else if (header.ColumnMeta[f].CompressionType > CompressionType.Immediate) { byteType = FieldType.INT; } else { byteType = FieldTypes[NextPow2(~~(header.ColumnMeta[f].BitWidth + 7) / 8)]; } fields.Add(new FieldInfo() { ArraySize = header.ColumnMeta[f].ArraySize, Type = byteType == FieldType.INT ? 
FieldType.UNKNOWN : byteType }); } int offset = 0; for (int i = 0; i < fields.Count; i++) { switch (fields[i].Type) { case FieldType.BYTE: offset++; continue; case FieldType.USHORT: offset += 2; continue; case FieldType.INT: offset += 4; continue; case FieldType.ULONG: offset += 8; continue; } List <FieldType> options = new List <FieldType>() { FieldType.INT, FieldType.FLOAT, FieldType.STRING }; if (!stringtableused) { options.Remove(FieldType.STRING); //Stringtable not used } List <int> ints = new List <int>(); List <float> floats = new List <float>(); foreach (var c in copyTable) { for (int x = 0; x < fields[i].ArraySize; x++) { int asInt = BitConverter.ToInt32(c.Value.Skip(offset + (4 * x)).Take(4).ToArray(), 0); if (asInt > 0) { ints.Add(asInt); if (FloatUtil.IsLikelyFloat(asInt)) { floats.Add(BitConverter.ToSingle(BitConverter.GetBytes(asInt), 0)); } } } } // remove 0's as they could be anything - if all removed then guess its an int ints.RemoveAll(x => x == 0); if (ints.Count == 0) { fields[i].Type = FieldType.INT; offset += (4 * fields[i].ArraySize); continue; } // stringtable doesn't contain string so cant be a string if (options.Contains(FieldType.STRING) && ints.Any(x => !StringTable.ContainsKey(x))) { options.Remove(FieldType.STRING); } if (floats.Count / (float)ints.Count >= 0.85) { fields[i].Type = FieldType.FLOAT; } else if (options.Contains(FieldType.STRING)) { fields[i].Type = FieldType.STRING; } else if (header.ColumnMeta[i].CompressionType == CompressionType.Immediate && header.ColumnMeta[i].Cardinality == 0) { fields[i].Type = FieldType.UINT; } else { fields[i].Type = FieldType.INT; } offset += (4 * fields[i].ArraySize); } Table table = new Table(); table.Name = Path.GetFileNameWithoutExtension(file); table.Fields = new List <Field>(); string format = $"X{fields.Count.ToString("X").Length}"; //X2, X3 etc for (int i = 0; i < fields.Count; i++) { if (header.RelationshipCount > 0 && i == fields.Count - 1) { continue; } Field field = new Field(); 
field.Name = (i == header.IdIndex ? "ID" : $"field{i.ToString(format)}"); field.IsIndex = (i == header.IdIndex); field.ArraySize = (field.IsIndex ? 1 : fields[i].ArraySize); field.Type = fields[i].Type.ToString().ToLower(); table.Fields.Add(field); Console.WriteLine($"Name: {field.Name} | Array: {field.ArraySize} | Type: {field.Type}"); } tables.Add(table); Database.ForceGC(); } }
/// <summary>
/// Restores mission prize configuration from a StringTable-encoded string:
/// prize types, points, user-group active times, medal prizes, invite serial
/// count and prop counts. Missing keys leave the corresponding members alone.
/// </summary>
/// <param name="valueString">StringTable-formatted key/value text.</param>
public void ConvertFromString(string valueString)
{
    StringTable data = StringTable.Parse(valueString);

    if (data.ContainsKey("PrizeTypes"))
    {
        PrizeTypes = StringUtil.Split2 <MissionPrizeType>(data["PrizeTypes"]);
    }
    if (data.ContainsKey("Points"))
    {
        Points = StringUtil.Split <int>(data["Points"]);
    }

    // User groups: ids paired positionally with active times (missing time -> 0).
    Guid[] groupIds = null;
    if (data.ContainsKey("UserGroupIDs"))
    {
        groupIds = StringUtil.Split <Guid>(data["UserGroupIDs"]);
    }
    if (groupIds != null && groupIds.Length > 0 && data.ContainsKey("UserGroupActiveTimes"))
    {
        long[] groupTimes = StringUtil.Split <long>(data["UserGroupActiveTimes"]);
        for (int i = 0; i < groupIds.Length; i++)
        {
            long seconds = (groupTimes.Length > i) ? groupTimes[i] : 0;
            UserGroups.Add(groupIds[i], seconds);
        }
    }

    // Medals: ids paired positionally with level ids and active times.
    int[] medalIds = null, medalLevelIds = null;
    if (data.ContainsKey("MedalIDs"))
    {
        medalIds = StringUtil.Split <int>(data["MedalIDs"]);
    }
    if (data.ContainsKey("MedalLevelIDs"))
    {
        medalLevelIds = StringUtil.Split <int>(data["MedalLevelIDs"]);
    }
    if (medalIds != null && medalIds.Length > 0 &&
        medalLevelIds != null && medalLevelIds.Length > 0 &&
        data.ContainsKey("MedalActiveTimes"))
    {
        long[] medalTimes = StringUtil.Split <long>(data["MedalActiveTimes"]);
        for (int i = 0; i < medalIds.Length; i++)
        {
            if (medalLevelIds.Length <= i)
            {
                break; // no matching level id for this medal — stop pairing
            }
            long seconds = (medalTimes.Length > i) ? medalTimes[i] : 0;
            PrizeMedal medal = new PrizeMedal();
            medal.MedalID = medalIds[i];
            medal.MedalLevelID = medalLevelIds[i];
            medal.Seconds = seconds;
            Medals.Add(medal);
        }
    }

    if (data.ContainsKey("InviteSerialCount"))
    {
        // TryParse zeroes the field when the stored value is malformed.
        int.TryParse(data["InviteSerialCount"], out inviteSerialCount);
    }

    // Props are a nested StringTable of propId -> count.
    if (data.ContainsKey("Props"))
    {
        StringTable propCounts = StringTable.Parse(data["Props"]);
        foreach (string key in propCounts.Keys)
        {
            props.Add(int.Parse(key), int.Parse(propCounts[key]));
        }
    }
}
// NOTE(review): Load() is a single dispatch loop over identifier/value pairs from
// ConfigFile.Get. Two pieces of loop state drive everything: currentLanguage (the
// language entries apply to; null until a Language/Alter Language line is seen)
// and alterCurrentLanguage (whether the current section came from "Alter
// Language", which gates Add/Replace handling of Aliases/Extensions/Shebang
// Strings). An Alter of a language defined in the same file collapses into the
// original definition. Identifiers are matched lowercased, except Prototype
// Enders which keep the original case of the comment type. Success is judged by
// comparing the error-list count before and after parsing.
// Group: Loading Functions // __________________________________________________________________________ /* Function: Load * * Loads the configuration file and parses it. Redundant information will be simplified out, such as an Alter * Language section that applies to a language defined in the same file. * * Parameters: * * filename - The <Path> where the file is located. * fileLanguages - Returns a list of <ConfigFileLanguages> in no particular order. * fileIgnoredExtensions - Returns any ignored extensions as a string array. * errorList - If it couldn't successfully parse the file it will add error messages to this list. * * Returns: * * Whether it was able to successfully load and parse the file without any errors. */ public bool Load(Path filename, out List <ConfigFileLanguage> fileLanguages, out List <string> fileIgnoredExtensions, Errors.ErrorList errorList) { fileLanguages = new List <ConfigFileLanguage>(); fileIgnoredExtensions = new List <string>(); StringTable <ConfigFileLanguage> fileLanguageNames = new StringTable <ConfigFileLanguage>(Engine.Languages.Manager.KeySettingsForLanguageName); int previousErrorCount = errorList.Count; using (ConfigFile file = new ConfigFile()) { // Can't make identifiers lowercase here or we'd lose the case of the comment type in prototype ender lines. bool openResult = file.Open(filename, ConfigFile.FileFormatFlags.CondenseIdentifierWhitespace | ConfigFile.FileFormatFlags.CondenseValueWhitespace, errorList); if (openResult == false) { return(false); } string identifier, lcIdentifier, value; ConfigFileLanguage currentLanguage = null; // We need this in addition to ConfigFileLanguage.AlterLanguage because an entry altering a type defined in the // same file would be combined into the original, yet we still need to know if that entry is Alter to properly // detect whether we need to use Add/Replace with certain properties. 
bool alterCurrentLanguage = false; char[] space = { ' ' }; System.Text.RegularExpressions.Match match; while (file.Get(out identifier, out value)) { lcIdentifier = identifier.ToLower(); // // Ignore Extensions // if (ignoreExtensionsRegex.IsMatch(lcIdentifier)) { currentLanguage = null; string[] ignoredExtensionsArray = value.Split(space); fileIgnoredExtensions.AddRange(ignoredExtensionsArray); } // // Language // else if (lcIdentifier == "language") { if (fileLanguageNames.ContainsKey(value)) { file.AddError( Locale.Get("NaturalDocs.Engine", "Languages.txt.LanguageAlreadyExists(name)", value) ); // Continue parsing. We'll throw this into the existing language even though it shouldn't be overwriting // its values because we want to find any other errors there are in the file. currentLanguage = fileLanguageNames[value]; alterCurrentLanguage = false; } else { currentLanguage = new ConfigFileLanguage(value, false, file.LineNumber); alterCurrentLanguage = false; fileLanguages.Add(currentLanguage); fileLanguageNames.Add(value, currentLanguage); } } // // Alter Language // else if (alterLanguageRegex.IsMatch(lcIdentifier)) { // If this language already exists, collapse it into the current definition. if (fileLanguageNames.ContainsKey(value)) { currentLanguage = fileLanguageNames[value]; alterCurrentLanguage = true; } // If it doesn't exist, create the new language anyway with the alter flag set because it may exist in another // file. 
else { currentLanguage = new ConfigFileLanguage(value, true, file.LineNumber); alterCurrentLanguage = true; fileLanguages.Add(currentLanguage); fileLanguageNames.Add(value, currentLanguage); } } // // Aliases // else if (aliasesRegex.IsMatch(lcIdentifier)) { if (currentLanguage == null) { NeedsLanguageError(file, identifier); } else if (alterCurrentLanguage == true) { file.AddError( Locale.Get("NaturalDocs.Engine", "Languages.txt.NeedAddReplaceWhenAlteringLanguage(keyword)", "Aliases") ); } else { currentLanguage.Aliases = value.Split(space); currentLanguage.AddAliases = false; } } // // Add/Replace Aliases // else if ((match = addReplaceAliasesRegex.Match(lcIdentifier)) != null && match.Success) { if (currentLanguage == null) { NeedsLanguageError(file, identifier); } else if (alterCurrentLanguage == true && match.Groups[1].Value == "add" && currentLanguage.Aliases != null) { string[] addAliases = value.Split(space); string[] newAliases = new string[addAliases.Length + currentLanguage.Aliases.Length]; currentLanguage.Aliases.CopyTo(newAliases, 0); addAliases.CopyTo(newAliases, currentLanguage.Aliases.Length); currentLanguage.Aliases = newAliases; currentLanguage.AddAliases = true; } // Covers "replace" when altering a language, "add" and "replace" when not altering a language (no point // in adding an error when we can just tolerate it, and "replace" when altering a language that doesn't have // anything defined. 
else { currentLanguage.Aliases = value.Split(space); currentLanguage.AddAliases = (match.Groups[1].Value == "add"); } } // // Extensions // else if (extensionsRegex.IsMatch(lcIdentifier)) { if (currentLanguage == null) { NeedsLanguageError(file, identifier); } else if (alterCurrentLanguage == true) { file.AddError( Locale.Get("NaturalDocs.Engine", "Languages.txt.NeedAddReplaceWhenAlteringLanguage(keyword)", "Extensions") ); } else { currentLanguage.Extensions = value.Split(space); currentLanguage.AddExtensions = false; } } // // Add/Replace Extensions // else if ((match = addReplaceExtensionsRegex.Match(lcIdentifier)) != null && match.Success) { if (currentLanguage == null) { NeedsLanguageError(file, identifier); } else if (alterCurrentLanguage == true && match.Groups[1].Value == "add" && currentLanguage.Extensions != null) { string[] addExtensions = value.Split(space); string[] newExtensions = new string[addExtensions.Length + currentLanguage.Extensions.Length]; currentLanguage.Extensions.CopyTo(newExtensions, 0); addExtensions.CopyTo(newExtensions, currentLanguage.Extensions.Length); currentLanguage.Extensions = newExtensions; currentLanguage.AddExtensions = true; } // Covers "replace" when altering a language, "add" and "replace" when not altering a language (no point // in adding an error when we can just tolerate it, and "replace" when altering a language that doesn't have // anything defined. 
else { currentLanguage.Extensions = value.Split(space); currentLanguage.AddExtensions = (match.Groups[1].Value == "add"); } } // // Shebang Strings // else if (shebangStringsRegex.IsMatch(lcIdentifier)) { if (currentLanguage == null) { NeedsLanguageError(file, identifier); } else if (alterCurrentLanguage == true) { file.AddError( Locale.Get("NaturalDocs.Engine", "Languages.txt.NeedAddReplaceWhenAlteringLanguage(keyword)", "Shebang Strings") ); } else { currentLanguage.ShebangStrings = value.Split(space); currentLanguage.AddShebangStrings = false; } } // // Add/Replace Shebang Strings // else if ((match = addReplaceShebangStringsRegex.Match(lcIdentifier)) != null && match.Success) { if (currentLanguage == null) { NeedsLanguageError(file, identifier); } else if (alterCurrentLanguage == true && match.Groups[1].Value == "add" && currentLanguage.ShebangStrings != null) { string[] addShebangStrings = value.Split(space); string[] newShebangStrings = new string[addShebangStrings.Length + currentLanguage.ShebangStrings.Length]; currentLanguage.ShebangStrings.CopyTo(newShebangStrings, 0); addShebangStrings.CopyTo(newShebangStrings, currentLanguage.ShebangStrings.Length); currentLanguage.ShebangStrings = newShebangStrings; currentLanguage.AddShebangStrings = true; } // Covers "replace" when altering a language, "add" and "replace" when not altering a language (no point // in adding an error when we can just tolerate it, and "replace" when altering a language that doesn't have // anything defined. 
else { currentLanguage.ShebangStrings = value.Split(space); currentLanguage.AddShebangStrings = (match.Groups[1].Value == "add"); } } // // Simple Identifier // else if (lcIdentifier == "simple identifier") { if (currentLanguage == null) { NeedsLanguageError(file, identifier); } else if (nonASCIILettersRegex.IsMatch(value)) { file.AddError( Locale.Get("NaturalDocs.Engine", "Languages.txt.SimpleIdentifierMustOnlyBeASCIILetters(name)", value) ); } else { currentLanguage.SimpleIdentifier = value; } } // // Line Comments // else if (lineCommentsRegex.IsMatch(lcIdentifier)) { if (currentLanguage == null) { NeedsLanguageError(file, identifier); } else { currentLanguage.LineCommentStrings = value.Split(space); } } // // Block Comments // else if (blockCommentsRegex.IsMatch(lcIdentifier)) { if (currentLanguage == null) { NeedsLanguageError(file, identifier); } else { string[] newBlockCommentStrings = value.Split(space); if (newBlockCommentStrings.Length % 2 != 0) { file.AddError( Locale.Get("NaturalDocs.Engine", "Languages.txt.BlockCommentsMustHaveAnEvenNumberOfSymbols") ); } else { currentLanguage.BlockCommentStringPairs = newBlockCommentStrings; } } } // // Member Operator // else if (memberOperatorRegex.IsMatch(lcIdentifier)) { if (currentLanguage == null) { NeedsLanguageError(file, identifier); } else { currentLanguage.MemberOperator = value; } } // // Line Extender // else if (lcIdentifier == "line extender") { if (currentLanguage == null) { NeedsLanguageError(file, identifier); } else { currentLanguage.LineExtender = value; } } // // Enum Values // else if (enumValuesRegex.IsMatch(lcIdentifier)) { string lcValue = value.ToLower(); if (currentLanguage == null) { NeedsLanguageError(file, identifier); } else if (lcValue == "global") { currentLanguage.EnumValue = Language.EnumValues.Global; } else if (lcValue == "under type") { currentLanguage.EnumValue = Language.EnumValues.UnderType; } else if (lcValue == "under parent") { currentLanguage.EnumValue = 
Language.EnumValues.UnderParent; } else { file.AddError( Locale.Get("NaturalDocs.Engine", "Languages.txt.InvalidEnumValue(value)", value) ); } } // // Case Sensitive // else if (caseSensitiveRegex.IsMatch(lcIdentifier)) { string lcValue = value.ToLower(); if (currentLanguage == null) { NeedsLanguageError(file, identifier); } else if (yesRegex.IsMatch(lcValue)) { currentLanguage.CaseSensitive = true; } else if (noRegex.IsMatch(lcValue)) { currentLanguage.CaseSensitive = false; } else { file.AddError( Locale.Get("NaturalDocs.Engine", "Languages.txt.UnrecognizedValue(keyword, value)", "Case Sensitive", value) ); } } // // Prototype Enders // // Use identifier and not lcIdentifier to keep the case of the comment type. The regex will compensate. else if ((match = prototypeEndersRegex.Match(identifier)) != null && match.Success) { if (currentLanguage == null) { NeedsLanguageError(file, identifier); } else { string commentType = match.Groups[1].Value; string[] enderStrings = value.Split(space); currentLanguage.SetPrototypeEnderStrings(commentType, enderStrings); } } // // Deprecated keywords // else if (ignorePrefixesRegex.IsMatch(lcIdentifier) || lcIdentifier == "perl package" || lcIdentifier == "full language support") { // Ignore } // // Unrecognized keywords // else { file.AddError( Locale.Get("NaturalDocs.Engine", "Languages.txt.UnrecognizedKeyword(keyword)", identifier) ); } } // while (file.Get) file.Close(); } if (errorList.Count == previousErrorCount) { return(true); } else { return(false); } }
// WDB5-only variant of the schema-guessing parser: rejects anything without the
// "WDB5" signature, locates the copy/index/string sections back-to-front from
// EOF, maps known byte counts to field types (id column forced to INT), and
// resolves 4-byte UNKNOWN columns by sampling record bytes and eliminating
// candidate types (STRING on control/replacement chars or missing string-table
// key, FLOAT outside ~[1e-6, 1e5], UINT on negative samples). Emits fields as
// "m_ID" / "fieldNN".
// NOTE(review): the non-offset-table sampling start uses
// wdb5.FieldStructure[i].Count, whereas the WDB6-capable twin of this method
// uses FieldStructure[i].Offset — confirm that Count is the record offset in
// this FieldStructure type, otherwise the sampler reads the wrong bytes.
private void Parse(MemoryStream stream, string file) { stream.Position = 0; using (var dbReader = new BinaryReader(stream, Encoding.UTF8)) { string signature = dbReader.ReadString(4); if (signature != "WDB5") { return; } WDB5 wdb5 = new WDB5(); wdb5.ReadHeader(dbReader, signature); long pos = dbReader.BaseStream.Position; long copyTablePos = dbReader.BaseStream.Length - wdb5.CopyTableSize; long indexTablePos = copyTablePos - (wdb5.HasIndexTable ? wdb5.RecordCount * 4 : 0); long stringTableStart = indexTablePos - wdb5.StringBlockSize; Dictionary <int, string> StringTable = new Dictionary <int, string>(); if (!wdb5.HasOffsetTable) { dbReader.Scrub(stringTableStart); StringTable = new StringTable().Read(dbReader, stringTableStart, stringTableStart + wdb5.StringBlockSize); dbReader.Scrub(pos); } Dictionary <int, FieldType> FieldTypes = new Dictionary <int, FieldType>() { { 4, FieldType.UNKNOWN }, { 3, FieldType.INT }, { 2, FieldType.USHORT }, { 1, FieldType.BYTE }, }; //Calculate known field types List <FieldInfo> fields = new List <FieldInfo>(); for (int i = 0; i < wdb5.FieldStructure.Length; i++) { int bytecount = wdb5.FieldStructure[i].ByteCount; FieldInfo fi = new FieldInfo(); fi.ArraySize = GetArraySize(ref wdb5, i); if (i == wdb5.IdIndex) { fi.Type = FieldType.INT; } else { fi.Type = FieldTypes[bytecount]; } fields.Add(fi); } var copytabledata = wdb5.ReadOffsetData(dbReader, pos).Values.ToList(); bool stringtableused = StringTable.Values.Any(x => !string.IsNullOrWhiteSpace(x)) && !wdb5.HasOffsetTable; //Attempt to figure out unknown types for (int i = 0; i < fields.Count; i++) { if (fields[i].Type != FieldType.UNKNOWN) { continue; } List <FieldType> options = new List <FieldType>() { FieldType.INT, FieldType.UINT, FieldType.FLOAT, FieldType.STRING }; if (!stringtableused) { options.Remove(FieldType.STRING); //Stringtable not used } List <int> intvals = new List <int>(); List <string> stringvals = new List <string>(); List <float> floatvals = new List <float>(); 
for (int d = 0; d < copytabledata.Count; d++) { byte[] cdata = copytabledata[d]; int start = wdb5.FieldStructure[i].Count; if (wdb5.HasOffsetTable) { start = 0; for (int x = 0; x < i; x++) { if (fields[x].Type != FieldType.STRING) { int bytecount = wdb5.FieldStructure[x].ByteCount; start += bytecount * fields[x].ArraySize; } else { start += cdata.Skip(start).TakeWhile(b => b != 0).Count() + 1; } } } byte[] data = cdata.Skip(start).Take(4).ToArray(); if (!wdb5.HasOffsetTable && data.All(x => x == 0)) { continue; //Ignore 0 byte columns as they could be anything } //Get int value int intval = BitConverter.ToInt32(data, 0); intvals.Add(intval); //String check if (options.Contains(FieldType.STRING)) { if (wdb5.HasOffsetTable) { //Check for control and nonunicode chars string stringval = Encoding.UTF8.GetString(cdata.Skip(start).TakeWhile(x => x != 0).ToArray()); if (stringval.Length >= 1 && stringval.Any(x => char.IsControl(x) || x == 0xFFFD)) { options.Remove(FieldType.STRING); } else { stringvals.Add(stringval); } } else { //Check it is in the stringtable and more than -1 if (intval < 0 || !StringTable.ContainsKey(intval)) { options.Remove(FieldType.STRING); } } } //Float check if (options.Contains(FieldType.FLOAT)) { //Basic float checks float single = BitConverter.ToSingle(data, 0); if (!float.IsInfinity(single) && !float.IsNaN(single) && (single >= 9.99999997475243E-07 && single <= 100000.0)) { floatvals.Add(single); } else if (single != 0) //Ignore 0s { options.Remove(FieldType.FLOAT); } } //UInt check if (options.Contains(FieldType.UINT)) { if (intval < 0) //If less than 0 must be signed { options.Remove(FieldType.UINT); } } } var uniquestr = new HashSet <string>(stringvals); var uniqueint = new HashSet <int>(intvals); var uniquefloat = new HashSet <float>(floatvals); if (uniqueint.Count == 1 && uniqueint.First() == 0) //All 0s { fields[i].Type = FieldType.INT; } else if (!wdb5.HasOffsetTable && options.Contains(FieldType.STRING)) //Int if only 1 Int else String 
{ fields[i].Type = (uniqueint.Count == 1 ? FieldType.INT : FieldType.STRING); } else if (wdb5.HasOffsetTable && options.Contains(FieldType.STRING) && uniquestr.Count > 1) //More than 1 string { fields[i].Type = FieldType.STRING; } else if (wdb5.HasOffsetTable && options.Contains(FieldType.STRING) && uniquefloat.Count <= 1) //1 or less float and string { fields[i].Type = FieldType.STRING; } else if (options.Contains(FieldType.FLOAT) && floatvals.Count > 0) //Floats count more than 1 { fields[i].Type = FieldType.FLOAT; } else if (options.Contains(FieldType.UINT)) //Uint over Int { fields[i].Type = FieldType.UINT; } else { fields[i].Type = FieldType.INT; } } Table table = new Table(); table.Name = Path.GetFileNameWithoutExtension(file); table.Fields = new List <Field>(); string format = $"X{wdb5.FieldStructure.Max(x => x.Count).ToString().Length}"; //X2, X3 etc for (int i = 0; i < fields.Count; i++) { Field field = new Field(); field.Name = (i == wdb5.IdIndex ? "m_ID" : $"field{wdb5.FieldStructure[i].Count.ToString(format)}"); field.IsIndex = (i == wdb5.IdIndex); field.ArraySize = fields[i].ArraySize; field.Type = fields[i].Type.ToString().ToLower(); table.Fields.Add(field); } tables.Add(table); Database.ForceGC(); } }
// Decodes a StringTable-encoded permission string for a role into the parallel
// ta1/ta2 byte arrays and their disabled flags, which are stored on the object
// at the end. Fields of TA1 map to ta1 by the field name; fields of TA2 map to
// ta2 with a "?" prefix on the lookup key. Value encoding: "1" = allow,
// "2" = deny, anything else = 0 (unset). Only fields carrying
// PermissionItemAttribute are processed; m1/m2 are separate write cursors into
// the two arrays. For items the role cannot edit (or that the text omits), the
// defaults from GetPermissionItemAttributeSet (isAllow/isDeny) are used instead,
// and non-editable items are recorded in the disabled arrays.
// NOTE(review): ta1/ta2 are sized by total field count, so trailing slots for
// fields lacking the attribute stay 0 — presumably intentional; confirm readers
// index by the same m1/m2 ordering.
internal virtual void DoParse(string text, Guid roleID) { if (text == null) { throw new ArgumentNullException("text"); } StringTable permissionItems = StringTable.Parse(text); FieldInfo[] fieldInfos = typeof(TA1).GetFields(); FieldInfo[] fieldInfosWithTarget = typeof(TA2).GetFields(); FieldInfo actionField; string actionName; byte[] ta1 = new byte[fieldInfos.Length]; byte[] ta2 = new byte[fieldInfosWithTarget.Length]; bool[] disabled_ta1 = new bool[fieldInfos.Length]; bool[] disabled_ta2 = new bool[fieldInfosWithTarget.Length]; int m1 = 0; int m2 = 0; for (int i = 0; i < fieldInfos.Length + fieldInfosWithTarget.Length; i++) { bool isTa1 = false; if (i < fieldInfos.Length) { isTa1 = true; actionField = fieldInfos[i]; actionName = actionField.Name; } else { actionField = fieldInfosWithTarget[i - fieldInfos.Length]; actionName = "?" + actionField.Name; } if (actionField.IsDefined(typeof(PermissionItemAttribute), false)) { bool isAllow; bool isDeny; bool editable; GetPermissionItemAttributeSet(roleID, actionField, out isAllow, out isDeny, out editable); if (editable && permissionItems.ContainsKey(actionName)) { string permissionText = permissionItems[actionName]; if (isTa1) { switch (permissionText) { case "2": ta1[m1] = 2; break; case "1": ta1[m1] = 1; break; default: ta1[m1] = 0; break; } } else { switch (permissionText) { case "2": ta2[m2] = 2; break; case "1": ta2[m2] = 1; break; default: ta2[m2] = 0; break; } } } else { if (isTa1) { if (isAllow) { ta1[m1] = 1; } else if (isDeny) { ta1[m1] = 2; } else { ta1[m1] = 0; } } else { if (isAllow) { ta2[m2] = 1; } else if (isDeny) { ta2[m2] = 2; } else { ta2[m2] = 0; } } } if (isTa1) { disabled_ta1[m1] = !editable; m1++; } else { disabled_ta2[m2] = !editable; m2++; } } } m_PermissionItems_ta1 = ta1; m_PermissionItems_ta2 = ta2; m_DisabledPermissionItems_ta1 = disabled_ta1; m_DisabledPermissionItems_ta2 = disabled_ta2; }