/// <summary>
/// Parses a braced field list ("{ a, b { c } }") from the tokenizer and appends
/// one <see cref="GraphQueryField"/> per entry to <paramref name="fields"/>.
/// Returns without consuming anything when no '{' is present.
/// </summary>
private static void ReadFields(Tokenizer tokenizer, ICollection<GraphQueryField> fields)
{
    tokenizer.SkipWhitespace();
    if (tokenizer.NextChar != '{')
        return;

    tokenizer.Advance();

    while (true)
    {
        tokenizer.SkipWhitespace();

        // A closing brace terminates the field list.
        if (tokenizer.NextChar == '}')
        {
            tokenizer.Advance();
            return;
        }

        var fieldName = tokenizer.ReadIdentifier();
        tokenizer.SkipWhitespace();

        var field = new GraphQueryField(fieldName.ToString());

        // An opening brace after the name introduces a nested selection set.
        if (tokenizer.NextChar == '{')
        {
            var children = new List<GraphQueryField>();
            ReadFields(tokenizer, children);
            if (children.Count > 0)
                field.NestedFields = children.ToArray();

            tokenizer.SkipWhitespace();
        }

        fields.Add(field);

        // Fields are separated by commas; anything other than ',' or '}' is a parse error.
        if (tokenizer.NextChar == ',')
            tokenizer.Advance();
        else if (tokenizer.NextChar != '}')
            throw new InvalidOperationException("Parse error after " + fieldName + " field, found: " + tokenizer.NextChar);
    }
}
/// <summary>
/// Parses a parenthesized filter list ("(key:value, ...)") from the tokenizer
/// into <paramref name="query"/>.Filters. Returns without consuming anything
/// when no '(' is present.
/// </summary>
private static void ReadFilters(Tokenizer tokenizer, GraphQuery query)
{
    tokenizer.SkipWhitespace();
    if (tokenizer.NextChar != '(')
        return;

    tokenizer.Advance();

    while (true)
    {
        tokenizer.SkipWhitespace();

        // A closing parenthesis terminates the filter list.
        if (tokenizer.NextChar == ')')
        {
            tokenizer.Advance();
            return;
        }

        var name = tokenizer.ReadIdentifier();
        tokenizer.SkipWhitespace();

        // Every filter must be "name:value".
        if (tokenizer.NextChar != ':')
            throw new InvalidOperationException("Filter " + name + " does not provide a value");

        tokenizer.Advance();
        tokenizer.SkipWhitespace();

        var filterValue = tokenizer.ReadValue();
        tokenizer.SkipWhitespace();

        // Filters are separated by commas; anything other than ',' or ')' is a parse error.
        if (tokenizer.NextChar == ',')
            tokenizer.Advance();
        else if (tokenizer.NextChar != ')')
            throw new InvalidOperationException("Parse error after " + name + " filter, found: " + tokenizer.NextChar);

        query.Filters.Add(name.ToString(), filterValue.ToString());
    }
}
/// <summary>
/// Advances the wrapped tokenizer, carrying the bound predicate along;
/// returns this same instance (a no-op) once the end has been reached.
/// </summary>
public ITokenizer<IMdToken> Advance()
{
    return AtEnd
        ? this
        : new BoundedTokenizer(Tokenizer.Advance(), BoundPredicate);
}
/// <summary>
/// An empty input produces no tokens: Advance reports false, the tokenizer
/// is at end, and no current value is exposed.
/// </summary>
public void CanTokenizeEmpty()
{
    var sut = new Tokenizer(string.Empty);

    Assert.False(sut.Advance());
    Assert.True(sut.IsEnd);
    Assert.Null(sut.CurrentValue);
}
/// <summary>
/// Whitespace-only input behaves like empty input: no tokens are produced.
/// </summary>
public void CanTokenizeBlank()
{
    var sut = new Tokenizer(" \t ");

    Assert.False(sut.Advance());
    Assert.True(sut.IsEnd);
    Assert.Null(sut.CurrentValue);
}
/// <summary>
/// Parses one colon-delimited leaderboard definition from the tokenizer and
/// appends it to the leaderboard collection.
/// </summary>
private void ReadLeaderboard(Tokenizer tokenizer)
{
    var leaderboard = new Leaderboard();

    int parsed;
    var segment = tokenizer.ReadTo(':'); // id
    if (Int32.TryParse(segment.ToString(), out parsed))
        leaderboard.Id = parsed;

    // The next four fields are always quoted strings.
    tokenizer.Advance();
    leaderboard.Start = tokenizer.ReadQuotedString().ToString();
    tokenizer.Advance();
    leaderboard.Cancel = tokenizer.ReadQuotedString().ToString();
    tokenizer.Advance();
    leaderboard.Submit = tokenizer.ReadQuotedString().ToString();
    tokenizer.Advance();
    leaderboard.Value = tokenizer.ReadQuotedString().ToString();

    tokenizer.Advance();
    leaderboard.Format = Leaderboard.ParseFormat(tokenizer.ReadIdentifier().ToString());

    // Title and description may be quoted; otherwise they run to the next colon.
    tokenizer.Advance();
    leaderboard.Title = (tokenizer.NextChar == '"')
        ? tokenizer.ReadQuotedString().ToString()
        : tokenizer.ReadTo(':').ToString();

    tokenizer.Advance();
    leaderboard.Description = (tokenizer.NextChar == '"')
        ? tokenizer.ReadQuotedString().ToString()
        : tokenizer.ReadTo(':').ToString();

    // Trailing flag: non-zero means lower scores rank higher.
    tokenizer.Advance();
    segment = tokenizer.ReadTo(':');
    if (Int32.TryParse(segment.ToString(), out parsed))
        leaderboard.LowerIsBetter = (parsed != 0);

    _leaderboards.Add(leaderboard);
}
/// <summary>
/// Reads a hexadecimal number from the tokenizer, consuming characters until the
/// first non-hex-digit. Returns 0 when no hex digits are present.
/// </summary>
/// <remarks>
/// Accumulates into a 32-bit value; digits beyond the first eight silently
/// overflow, matching the original switch-based implementation. The 22-case
/// switch was replaced with character-range arithmetic.
/// </remarks>
private static uint ReadHexNumber(Tokenizer tokenizer)
{
    uint value = 0;
    do
    {
        uint charValue;
        var c = tokenizer.NextChar;
        if (c >= '0' && c <= '9')
            charValue = (uint)(c - '0');
        else if (c >= 'a' && c <= 'f')
            charValue = (uint)(c - 'a' + 10);
        else if (c >= 'A' && c <= 'F')
            charValue = (uint)(c - 'A' + 10);
        else
            return value; // first non-hex character terminates the number

        tokenizer.Advance();
        value <<= 4;
        value += charValue;
    } while (true);
}
/// <summary>
/// Gets the next line from the CSV file
/// </summary>
/// <returns><see cref="Token"/>s for the line, or <c>null</c> if no more lines found.</returns>
public Token[] ReadLine()
{
    // '\0' signals the end of the input.
    if (_tokenizer.NextChar == '\0')
        return null;

    var line = new List<Token>();
    while (true)
    {
        // Quoted entries may contain commas; unquoted entries run to the next delimiter.
        if (_tokenizer.NextChar == '\"')
            line.Add(_tokenizer.ReadQuotedString());
        else
            line.Add(_tokenizer.ReadTo(',', '\n').Trim());

        if (_tokenizer.NextChar != ',')
            break;

        _tokenizer.Advance();
    }

    if (_tokenizer.NextChar != '\0')
    {
        // Anything other than a line terminator after the last entry is malformed input.
        if (_tokenizer.NextChar != '\r' && _tokenizer.NextChar != '\n')
            throw new InvalidOperationException(String.Format("Expected comma or newline after entry '{0}', found '{1}'", line[line.Count - 1], _tokenizer.NextChar));

        // Consume the terminator; handles both "\r\n" and bare "\n".
        while (_tokenizer.NextChar == '\r' || _tokenizer.NextChar == '\n')
            _tokenizer.Advance();
    }

    return line.ToArray();
}
/// <summary>
/// Handles a "{{color|name|text}}" directive by emitting a colored text run onto
/// the current format scope. Returns false when the directive is absent or the
/// input ends before it is terminated.
/// </summary>
private bool HandleColoredText()
{
    if (!_tokenizer.Match("{{color|"))
        return false;

    FlushInline();

    // Collect the color name up to the separating '|'.
    while (_tokenizer.NextChar != '|')
    {
        if (_tokenizer.NextChar == '\0')
            return false; // unterminated directive

        _buffer.Append(_tokenizer.NextChar);
        _tokenizer.Advance();
    }

    var color = _buffer.ToString();
    _buffer.Length = 0;
    _tokenizer.Advance(); // consume the '|'

    // Collect the text body up to the closing "}}".
    while (!_tokenizer.Match("}}"))
    {
        if (_tokenizer.NextChar == '\0')
            return false; // unterminated directive

        _buffer.Append(_tokenizer.NextChar);
        _tokenizer.Advance();
    }

    var inline = new Run(_buffer.ToString())
    {
        Foreground = (Brush)new BrushConverter().ConvertFromString(color)
    };
    _formatStack.Peek().Add(inline);

    _buffer.Length = 0;
    return true;
}
/// <summary>
/// Reads an unsigned decimal number, consuming ASCII digits until the first
/// non-digit. Returns 0 when no digits are present.
/// </summary>
private static uint ReadNumber(Tokenizer tokenizer)
{
    // Deliberately limited to ASCII '0'-'9' (char.IsDigit would also accept
    // non-ASCII decimal digits, which this accumulator cannot handle).
    uint result = 0;
    for (var c = tokenizer.NextChar; c >= '0' && c <= '9'; c = tokenizer.NextChar)
    {
        result = result * 10 + (uint)(c - '0');
        tokenizer.Advance();
    }
    return result;
}
/// <summary>
/// A run of adjacent operator characters is split greedily into the longest
/// recognized operator tokens.
/// </summary>
public void CanTokenizeOperators()
{
    const string text = "!!=>+++==>==<???+&&***<>";
    var sut = new Tokenizer(text);

    var actual = new List<string>();
    while (sut.Advance())
        actual.Add((string)sut.CurrentValue);

    var expected = new[] { "!", "!=", ">", "++", "+=", "=>", "==", "<", "??", "?", "+", "&&", "**", "*", "<>" };
    Assert.Equal(expected, actual);
}
/// <summary>
/// Reads a floating-point value (with optional leading '-') from the tokenizer,
/// parsing with the invariant culture so the format is locale-independent.
/// </summary>
private static float ReadFloat(Tokenizer tokenizer)
{
    var sign = 1.0f;
    if (tokenizer.NextChar == '-')
    {
        tokenizer.Advance();
        sign = -1.0f;
    }

    var token = tokenizer.ReadNumber();
    return sign * float.Parse(token.ToString(), System.Globalization.CultureInfo.InvariantCulture);
}
/// <summary>
/// Parses a C++ "enum [name] { member [= value], ... };" declaration from the
/// tokenizer and records it as a child of <paramref name="parent"/>. Each enum
/// member becomes a FieldInfo child of the new type; when an initializer is
/// present, its tokens are captured verbatim (space-prefixed) into field.Value.
/// </summary>
/// <param name="properties">Comment-derived annotations to attach to the enum type.</param>
/// <param name="parent">The member that will own the parsed enum.</param>
/// <param name="tokenizer">Positioned at the "enum" keyword.</param>
static void ParseEnum (Annotations properties, MemberInfo parent, Tokenizer tokenizer)
{
    FieldInfo field;
    StringBuilder value = new StringBuilder ();
    TypeInfo type = new TypeInfo ();

    type.Annotations = properties;
    type.IsEnum = true;

    tokenizer.AcceptOrThrow (Token2Type.Identifier, "enum");
    if (tokenizer.CurrentToken.type == Token2Type.Identifier) {
        type.Name = tokenizer.GetIdentifier ();
    } else {
        // Unnamed enum ("enum { ... };").
        type.Name = "<anonymous>";
    }
    parent.Children.Add (type);
    tokenizer.AcceptOrThrow (Token2Type.Punctuation, "{");
    //Console.WriteLine ("ParseEnum: {0}", name);
    while (tokenizer.CurrentToken.type == Token2Type.Identifier) {
        field = new FieldInfo ();
        field.Name = tokenizer.GetIdentifier ();
        value.Length = 0;
        if (tokenizer.Accept (Token2Type.Punctuation, "=")) {
            // Capture the initializer token-by-token until the member separator
            // or the closing brace. Each token is prefixed with a space.
            while (tokenizer.CurrentToken.value != "," && tokenizer.CurrentToken.value != "}") {
                value.Append (" ");
                value.Append (tokenizer.CurrentToken.value);
                tokenizer.Advance (true);
            }
        }
        field.Value = value.ToString ();
        type.Children.Add (field);
        //Console.WriteLine ("ParseEnum: {0}: {1} {2} {3}", name, field, value.Length != 0 != null ? "=" : "", value);
        // No trailing comma after the last member ends the list.
        if (!tokenizer.Accept (Token2Type.Punctuation, ","))
            break;
    }
    tokenizer.AcceptOrThrow (Token2Type.Punctuation, "}");
    tokenizer.AcceptOrThrow (Token2Type.Punctuation, ";");
}
/// <summary>
/// Parses C++ member declarations (nested types, enums, typedefs, friend/using
/// declarations, methods and fields) from the tokenizer into
/// <paramref name="parent"/>.Children until the enclosing '}' is seen.
/// Returns true at the closing brace; returns false when the token stream is
/// exhausted (Accept throwing inside the leading try/catch).
/// </summary>
static bool ParseMembers (MemberInfo parent, Tokenizer tokenizer)
{
    Annotations properties = new Annotations ();
    TypeInfo parent_type = parent as TypeInfo;
    string accessibility;
    TypeReference returntype;
    bool is_dtor;
    bool is_ctor;
    bool is_virtual;
    bool is_static;
    bool is_const;
    bool is_extern;
    string name;

    //Console.WriteLine ("ParseMembers ({0})", type.Name);

    do {
        // Reset per-member state.
        returntype = null;
        is_dtor = is_ctor = is_virtual = is_static = false;
        is_extern = is_const = false;
        name = null;
        properties = new Annotations ();

        // Default accessibility: structs default to public, classes to private.
        if (parent_type != null)
            accessibility = parent_type.IsStruct ? "public" : "private";
        else
            accessibility = "public";

        try {
            if (tokenizer.Accept (Token2Type.Punctuation, ";"))
                continue;
        } catch {
            // NOTE(review): Accept appears to throw at end of input — treated as "no more tokens".
            return false;
        }

        // Closing brace of the enclosing type: done with this member list.
        if (tokenizer.CurrentToken.value == "}")
            return true;

        // Collect any comment-property annotations preceding the member.
        while (tokenizer.CurrentToken.type == Token2Type.CommentProperty) {
            properties.Add (tokenizer.CurrentToken.value);
            tokenizer.Advance (true);
        }

        //Console.WriteLine ("ParseMembers: Current token: {0}", tokenizer.CurrentToken);

        // Leading keywords that introduce a whole construct (or are skipped outright).
        if (tokenizer.CurrentToken.type == Token2Type.Identifier) {
            string v = tokenizer.CurrentToken.value;
            switch (v) {
            case "public":
            case "protected":
            case "private":
                // Accessibility label ("public:") applies to subsequent members.
                accessibility = v;
                tokenizer.Advance (true);
                tokenizer.Accept (Token2Type.Punctuation, ":");
                continue;
            case "enum":
                ParseEnum (properties, parent, tokenizer);
                continue;
            case "friend":
                // Friend declarations are skipped entirely.
                while (!tokenizer.Accept (Token2Type.Punctuation, ";")) {
                    tokenizer.Advance (true);
                }
                continue;
            case "struct":
            case "class":
            case "union":
                if (!ParseClassOrStruct (properties, parent, tokenizer))
                    return false;
                continue;
            case "typedef":
                // Capture the entire typedef text (including any brace-enclosed
                // body) so it can be emitted as a C-binding requisite.
                StringBuilder requisite = new StringBuilder ();
                requisite.Append (tokenizer.CurrentToken.value);
                requisite.Append (' ');
                tokenizer.Advance (true);
                while (!tokenizer.Accept (Token2Type.Punctuation, ";")) {
                    requisite.Append (tokenizer.CurrentToken.value);
                    requisite.Append (' ');
                    if (tokenizer.CurrentToken.value == "{") {
                        tokenizer.Advance (true);
                        while (!tokenizer.Accept (Token2Type.Punctuation, "}")) {
                            requisite.Append (tokenizer.CurrentToken.value);
                            requisite.Append (' ');
                            tokenizer.Advance (true);
                        }
                        requisite.Append (tokenizer.CurrentToken.value);
                        requisite.Append (' ');
                    }
                    tokenizer.Advance (true);
                }
                requisite.Append (";");
                if (properties.ContainsKey ("CBindingRequisite"))
                    cbinding_requisites.AppendLine (requisite.ToString ());
                continue;
            case "EVENTHANDLER":
                // Event handler macro: skip to the terminating ';'.
                while (!tokenizer.Accept (Token2Type.Punctuation, ";"))
                    tokenizer.Advance (true);
                continue;
            case "template":
                // Only the simple "template <typename T>" form is consumed.
                tokenizer.Advance (true);
                tokenizer.AcceptOrThrow (Token2Type.Punctuation, "<");
                tokenizer.AcceptOrThrow (Token2Type.Identifier, "typename");
                tokenizer.GetIdentifier ();
                tokenizer.AcceptOrThrow (Token2Type.Punctuation, ">");
                continue;
            case "using":
                tokenizer.Advance (true);
                continue;
            case "namespace":
                tokenizer.Advance (true);
                tokenizer.GetIdentifier ();
                tokenizer.Accept (Token2Type.Punctuation, "{");
                continue;
            case "mutable":
                tokenizer.Advance (true);
                continue;
            }
        }

        // Consume any member modifier keywords, in any order.
        do {
            if (tokenizer.Accept (Token2Type.Identifier, "virtual")) {
                is_virtual = true;
                continue;
            }
            if (tokenizer.Accept (Token2Type.Identifier, "static")) {
                is_static = true;
                continue;
            }
            if (tokenizer.Accept (Token2Type.Identifier, "const")) {
                is_const = true;
                continue;
            }
            if (tokenizer.Accept (Token2Type.Identifier, "extern")) {
                is_extern = true;
                continue;
            }
            if (tokenizer.Accept (Token2Type.Identifier, "volatile")) {
                continue;
            }
            if (tokenizer.Accept (Token2Type.Identifier, "G_GNUC_INTERNAL")) {
                continue;
            }
            break;
        } while (true);

        // extern "C" { ... } blocks are skipped wholesale.
        if (is_extern && tokenizer.Accept (Token2Type.Literal, "C")) {
            tokenizer.SyncWithEndBrace ();
            continue;
        }

        if (tokenizer.Accept (Token2Type.Punctuation, "~")) {
            is_dtor = true;
            if (!is_virtual) {
                // Warn about non-virtual destructors on derived classes.
                TypeInfo ti = parent as TypeInfo;
                if (ti != null && ti.Base != null)
                    Console.WriteLine ("The class {0} has a non-virtual destructor, and it's base class is {2} ({1}).", parent.Name, parent.Header, ti != null && ti.Base != null ? ti.Base.Value : "<none>");
            }
        }

        if (is_dtor) {
            name = "~" + tokenizer.GetIdentifier ();
            returntype = new TypeReference ("void");
        } else {
            returntype = ParseTypeReference (tokenizer);
            // Skip template argument lists on the return type.
            if (tokenizer.CurrentToken.value == "<") {
                tokenizer.Advance (true);
                while (!tokenizer.Accept (Token2Type.Punctuation, ">"))
                    tokenizer.Advance (true);
            }
            // A "return type" matching the enclosing type name followed by '('
            // is actually a constructor.
            if (returntype.Value == parent.Name && tokenizer.CurrentToken.value == "(") {
                is_ctor = true;
                name = returntype.Value;
                returntype.Value += "*";
            } else {
                name = tokenizer.GetIdentifier ();
            }
        }
        returntype.IsConst = is_const;
        returntype.IsReturnType = true;

        //Console.WriteLine ("ParseMembers: found member '{0}' is_ctor: {1}", name, is_ctor);

        if (tokenizer.Accept (Token2Type.Punctuation, "(")) {
            // Method
            MethodInfo method = new MethodInfo ();
            method.Header = tokenizer.CurrentFile;
            method.Parent = parent;
            method.Annotations = properties;
            method.Name = name;
            method.IsConstructor = is_ctor;
            method.IsDestructor = is_dtor;
            method.IsVirtual = is_virtual;
            method.IsStatic = is_static;
            method.IsPublic = accessibility == "public";
            method.IsPrivate = accessibility == "private";
            method.IsProtected = accessibility == "protected";
            method.ReturnType = returntype;

            //Console.WriteLine ("ParseMembers: found method '{0}' is_ctor: {1}", name, is_ctor);

            if (!tokenizer.Accept (Token2Type.Punctuation, ")")) {
                // Parse the comma-separated parameter list.
                string param_value = null;
                do {
                    ParameterInfo parameter = new ParameterInfo (method);
                    while (tokenizer.CurrentToken.type == Token2Type.CommentProperty) {
                        parameter.Annotations.Add (tokenizer.CurrentToken.value);
                        tokenizer.Advance (true);
                    }
                    if (tokenizer.Accept (Token2Type.Punctuation, ".") && tokenizer.Accept (Token2Type.Punctuation, ".") && tokenizer.Accept (Token2Type.Punctuation, ".")) {
                        // ... variable argument declaration
                        parameter.ParameterType = new TypeReference ("...");
                    } else {
                        if (tokenizer.CurrentToken.type == Token2Type.Identifier) {
                            // Strip a leading "Moonlight::" namespace qualifier.
                            if (tokenizer.Accept (Token2Type.Identifier, "Moonlight")) {
                                tokenizer.Accept (Token2Type.Punctuation, ":");
                                tokenizer.Accept (Token2Type.Punctuation, ":");
                            }
                        }
                        parameter.ParameterType = ParseTypeReference (tokenizer);
                    }
                    if (tokenizer.CurrentToken.value != "," && tokenizer.CurrentToken.value != ")") {
                        parameter.Name = tokenizer.GetIdentifier ();
                        // Array suffix ("name[size]").
                        if (tokenizer.Accept (Token2Type.Punctuation, "[")) {
                            if (tokenizer.CurrentToken.type == Token2Type.Identifier)
                                tokenizer.Advance (true);
                            tokenizer.AcceptOrThrow (Token2Type.Punctuation, "]");
                        }
                        // Default value, possibly negative or scope-qualified ("Ns::Value").
                        if (tokenizer.Accept (Token2Type.Punctuation, "=")) {
                            param_value = string.Empty;
                            if (tokenizer.Accept (Token2Type.Punctuation, "-"))
                                param_value = "-";
                            param_value += tokenizer.GetIdentifier ();
                            if (tokenizer.Accept (Token2Type.Punctuation, ":")) {
                                tokenizer.AcceptOrThrow (Token2Type.Punctuation, ":");
                                param_value += "::" + tokenizer.GetIdentifier ();
                            }
                        }
                    }
                    method.Parameters.Add (parameter);
                    //Console.WriteLine ("ParseMember: got parameter, type: '{0}' name: '{1}' value: '{2}'", parameter.ParameterType.Value, parameter.Name, param_value);
                } while (tokenizer.Accept (Token2Type.Punctuation, ","));
                tokenizer.AcceptOrThrow (Token2Type.Punctuation, ")");
            }
            parent.Children.Add (method);

            //Allow const member functions, ignore the const keyword
            tokenizer.Accept (Token2Type.Identifier, "const");

            if (tokenizer.CurrentToken.value == "{") {
                //Console.WriteLine ("ParseMember: member has body, skipping it");
                tokenizer.SyncWithEndBrace ();
            } else if (is_ctor && tokenizer.Accept (Token2Type.Punctuation, ":")) {
                // ctor method implemented in header with field initializers and/or base class ctor call
                tokenizer.FindStartBrace ();
                tokenizer.SyncWithEndBrace ();
                //Console.WriteLine ("ParseMember: skipped ctor method implementation");
            } else if (tokenizer.Accept (Token2Type.Punctuation, "=")) {
                // pure virtual method
                tokenizer.AcceptOrThrow (Token2Type.Identifier, "0");
                tokenizer.AcceptOrThrow (Token2Type.Punctuation, ";");
                method.IsAbstract = true;
            } else {
                // Plain declaration, possibly with a GCC __attribute__((...)).
                if (tokenizer.Accept (Token2Type.Identifier, "__attribute__")) {
                    tokenizer.AcceptOrThrow (Token2Type.Punctuation, "(");
                    tokenizer.AcceptOrThrow (Token2Type.Punctuation, "(");
                    if (tokenizer.CurrentToken.type == Token2Type.Identifier)
                        tokenizer.Advance (true);
                    tokenizer.AcceptOrThrow (Token2Type.Punctuation, ")");
                    tokenizer.AcceptOrThrow (Token2Type.Punctuation, ")");
                }
                tokenizer.AcceptOrThrow (Token2Type.Punctuation, ";");
            }
        } else {
            if (is_ctor || is_dtor)
                throw new Exception (string.Format ("Expected '(', not '{0}'", tokenizer.CurrentToken.value));

            if (name == "operator") {
                // Operators are skipped (up to ';' or past an inline body).
                while (true) {
                    if (tokenizer.CurrentToken.value == ";") {
                        // End of operator
                        break;
                    } else if (tokenizer.CurrentToken.value == "{") {
                        // In-line implementation
                        tokenizer.SyncWithEndBrace ();
                        break;
                    }
                    tokenizer.Advance (true);
                }
                //Console.WriteLine ("ParseMembers: skipped operator");
            } else {
                FieldInfo field = new FieldInfo ();
                field.IsConst = is_const;
                field.IsStatic = is_static;
                field.IsExtern = is_extern;
                field.Name = name;
                field.FieldType = returntype;
                field.IsPublic = accessibility == "public";
                field.IsPrivate = accessibility == "private";
                field.IsProtected = accessibility == "protected";
                field.Annotations = properties;

                // Field
                do {
                    //Console.WriteLine ("ParseMembers: found field '{0}'", name);
                    field.Parent = parent;
                    parent.Children.Add (field);
                    // Array suffix: skip everything up to ']'.
                    if (tokenizer.Accept (Token2Type.Punctuation, "[")) {
                        while (!tokenizer.Accept (Token2Type.Punctuation, "]")) {
                            tokenizer.Advance (true);
                        }
                    }
                    // Bit field width ("name : width").
                    if (tokenizer.Accept (Token2Type.Punctuation, ":")) {
                        field.BitField = tokenizer.GetIdentifier ();
                    }
                    // Comma-separated declarator list: start a new field with the same type.
                    if (tokenizer.Accept (Token2Type.Punctuation, ",")) {
                        field = new FieldInfo ();
                        if (tokenizer.Accept (Token2Type.Punctuation, "*")) {
                            // ok
                        }
                        field.Name = tokenizer.GetIdentifier ();
                        field.FieldType = returntype;
                        continue;
                    }
                    if (tokenizer.Accept (Token2Type.Punctuation, "=")) {
                        tokenizer.Advance (true); /* this can be an arbitrary long expression, sync with the ';'? */
                    }
                    break;
                } while (true);
                tokenizer.Accept (Token2Type.Punctuation, ";");
            }
        }
    } while (true);
}
// Returns false if there are no more tokens (reached end of code)
/// <summary>
/// Parses a C++ class/struct/union declaration (header, optional base list and
/// member body) and adds the resulting TypeInfo to <paramref name="parent"/>.
/// Forward declarations ("class Foo;") are consumed and return true without
/// adding a type.
/// </summary>
static bool ParseClassOrStruct (Annotations annotations, MemberInfo parent, Tokenizer tokenizer)
{
    TypeInfo type = new TypeInfo ();
    type.Annotations = annotations;
    type.Header = tokenizer.CurrentFile;
    type.Parent = parent;

    type.IsPublic = tokenizer.Accept (Token2Type.Identifier, "public");

    if (tokenizer.Accept (Token2Type.Identifier, "class")) {
        type.IsClass = true;
    } else if (tokenizer.Accept (Token2Type.Identifier, "struct")) {
        type.IsStruct = true;
        type.IsValueType = true;
    } else if (tokenizer.Accept (Token2Type.Identifier, "union")) {
        type.IsStruct = true; // Not entirely correct, but a union can be parsed as a struct
        type.IsValueType = true;
    } else {
        throw new Exception (string.Format ("Expected 'class' or 'struct', not '{0}'", tokenizer.CurrentToken.value));
    }

    if (tokenizer.CurrentToken.type == Token2Type.Identifier) {
        type.Name = tokenizer.GetIdentifier ();
    } else {
        type.Name = "<anonymous>";
    }

    if (tokenizer.Accept (Token2Type.Punctuation, ";")) {
        // A forward declaration.
        //Console.WriteLine ("ParseType: Found a forward declaration to {0}", type.Name);
        return true;
    }

    if (tokenizer.Accept (Token2Type.Punctuation, ":")) {
        // Base class list: only the (public) primary base is recorded.
        if (!tokenizer.Accept (Token2Type.Identifier, "public") && type.IsClass)
            throw new Exception (string.Format ("The base class of {0} is not public.", type.Name));
        type.Base = ParseTypeReference (tokenizer);
        // accept multiple inheritance the easy way
        while (tokenizer.CurrentToken.value == ",") {
            tokenizer.Accept (Token2Type.Punctuation, ",");
            while (tokenizer.CurrentToken.value != "," && tokenizer.CurrentToken.value != "{")
                tokenizer.GetIdentifier ();
        }
        //Console.WriteLine ("ParseType: Found {0}'s base class: {1}", type.Name, type.Base);
    }

    tokenizer.AcceptOrThrow (Token2Type.Punctuation, "{");

    //Console.WriteLine ("ParseType: Found a type: {0} in {1}", type.Name, type.Header);
    parent.Children.Add (type);

    ParseMembers (type, tokenizer);

    tokenizer.AcceptOrThrow (Token2Type.Punctuation, "}");

    // Skip an optional declarator name between '}' and ';' (e.g. "} name;").
    if (tokenizer.CurrentToken.type == Token2Type.Identifier)
        tokenizer.Advance (true);

    if (tokenizer.CurrentToken.value != ";")
        throw new Exception (string.Format ("Expected ';', not '{0}'", tokenizer.CurrentToken.value));

    // Advance past the ';'; false here means end of the token stream.
    return tokenizer.Advance (false);
}
/// <summary>
/// Tokenizes every header under src/, plugin/ and the PAL directories, parses
/// all declarations into a GlobalInfo, registers the hand-maintained
/// primitive/interface types, and finally flags every type that derives from
/// EventObject with the "IncludeInKinds" annotation.
/// </summary>
static GlobalInfo GetTypes2 ()
{
    string srcdir = Path.Combine (Environment.CurrentDirectory, "src");
    string plugindir = Path.Combine (Environment.CurrentDirectory, "plugin");
    string paldir = Path.Combine (srcdir, "pal");
    string palgtkdir = Path.Combine (paldir, "gtk");

    List<string> all_files = new List<string> ();
    all_files.AddRange (Directory.GetFiles (srcdir, "*.h"));
    all_files.AddRange (Directory.GetFiles (plugindir, "*.h"));
    all_files.AddRange (Directory.GetFiles (paldir, "*.h"));
    all_files.AddRange (Directory.GetFiles (palgtkdir, "*.h"));

    RemoveExcludedSrcFiles (srcdir, plugindir, paldir, all_files);

    Tokenizer tokenizer = new Tokenizer (all_files.ToArray ());
    GlobalInfo all = new GlobalInfo ();

    tokenizer.Advance (false);

    try {
        while (ParseMembers (all, tokenizer)) {
            // Between top-level members, tolerate stray "};" pairs.
            try {
                tokenizer.Accept (Token2Type.Punctuation, "}");
                tokenizer.Accept (Token2Type.Punctuation, ";");
            } catch {}
        }
    } catch (Exception ex) {
        // Re-raise with file/line context for easier diagnosis.
        throw new Exception (string.Format ("{0}({1}): {2}", tokenizer.CurrentFile, tokenizer.CurrentLine, ex.Message), ex);
    }

    // Add all the manual types
    TypeInfo t;
    TypeInfo IComparableInfo;
    TypeInfo IFormattableInfo;
    TypeInfo IConvertibleInfo;
    TypeInfo IEquatableBoolInfo;
    TypeInfo IComparableBoolInfo;
    TypeInfo IEquatableDoubleInfo;
    TypeInfo IComparableDoubleInfo;
    TypeInfo IEquatableFloatInfo;
    TypeInfo IComparableFloatInfo;
    TypeInfo IEquatableCharInfo;
    TypeInfo IComparableCharInfo;
    TypeInfo IEquatableIntInfo;
    TypeInfo IComparableIntInfo;
    TypeInfo IEquatableLongInfo;
    TypeInfo IComparableLongInfo;
    TypeInfo IEquatableStringInfo;
    TypeInfo IComparableStringInfo;
    TypeInfo IEquatableTimeSpanInfo;
    TypeInfo IComparableTimeSpanInfo;
    TypeInfo IEquatableUintInfo;
    TypeInfo IComparableUintInfo;
    TypeInfo IEquatableUlongInfo;
    TypeInfo IComparableUlongInfo;

    // Root object plus the interface pseudo-types.
    all.Children.Add (new TypeInfo ("object", "OBJECT", "INVALID", true, true));
    all.Children.Add (IComparableInfo = new TypeInfo ("IComparable", "ICOMPARABLE", "OBJECT", true, true, false, true));
    all.Children.Add (IFormattableInfo = new TypeInfo ("IFormattable", "IFORMATTABLE", "OBJECT", true, true, false, true));
    all.Children.Add (IConvertibleInfo = new TypeInfo ("IConvertible", "ICONVERTIBLE", "OBJECT", true, true, false, true));
    all.Children.Add (IEquatableBoolInfo = new TypeInfo ("IEquatable<bool>", "IEQUATABLE_BOOL", "OBJECT", true, true, false, true));
    all.Children.Add (IComparableBoolInfo = new TypeInfo ("IComparable<bool>", "ICOMPARABLE_BOOL", "OBJECT", true, true, false, true));
    all.Children.Add (IEquatableDoubleInfo = new TypeInfo ("IEquatable<double>", "IEQUATABLE_DOUBLE", "OBJECT", true, true, false, true));
    all.Children.Add (IComparableDoubleInfo = new TypeInfo ("IComparable<double>", "ICOMPARABLE_DOUBLE", "OBJECT", true, true, false, true));
    all.Children.Add (IEquatableFloatInfo = new TypeInfo ("IEquatable<float>", "IEQUATABLE_FLOAT", "OBJECT", true, true, false, true));
    all.Children.Add (IComparableFloatInfo = new TypeInfo ("IComparable<float>", "ICOMPARABLE_FLOAT", "OBJECT", true, true, false, true));
    all.Children.Add (IEquatableCharInfo = new TypeInfo ("IEquatable<char>", "IEQUATABLE_CHAR", "OBJECT", true, true, false, true));
    all.Children.Add (IComparableCharInfo = new TypeInfo ("IComparable<char>", "ICOMPARABLE_CHAR", "OBJECT", true, true, false, true));
    all.Children.Add (IEquatableIntInfo = new TypeInfo ("IEquatable<int>", "IEQUATABLE_INT", "OBJECT", true, true, false, true));
    all.Children.Add (IComparableIntInfo = new TypeInfo ("IComparable<int>", "ICOMPARABLE_INT", "OBJECT", true, true, false, true));
    all.Children.Add (IEquatableLongInfo = new TypeInfo ("IEquatable<long>", "IEQUATABLE_LONG", "OBJECT", true, true, false, true));
    all.Children.Add (IComparableLongInfo = new TypeInfo ("IComparable<long>", "ICOMPARABLE_LONG", "OBJECT", true, true, false, true));
    all.Children.Add (IEquatableStringInfo = new TypeInfo ("IEquatable<string>", "IEQUATABLE_STRING", "OBJECT", true, true, false, true));
    all.Children.Add (IComparableStringInfo = new TypeInfo ("IComparable<string>", "ICOMPARABLE_STRING", "OBJECT", true, true, false, true));
    all.Children.Add (IEquatableTimeSpanInfo = new TypeInfo ("IEquatable<TimeSpan>", "IEQUATABLE_TIMESPAN", "OBJECT", true, true, false, true));
    all.Children.Add (IComparableTimeSpanInfo = new TypeInfo ("IComparable<TimeSpan>", "ICOMPARABLE_TIMESPAN", "OBJECT", true, true, false, true));
    all.Children.Add (IEquatableUintInfo = new TypeInfo ("IEquatable<uint>", "IEQUATABLE_UINT", "OBJECT", true, true, false, true));
    all.Children.Add (IComparableUintInfo = new TypeInfo ("IComparable<uint>", "ICOMPARABLE_UINT", "OBJECT", true, true, false, true));
    all.Children.Add (IEquatableUlongInfo = new TypeInfo ("IEquatable<ulong>", "IEQUATABLE_ULONG", "OBJECT", true, true, false, true));
    all.Children.Add (IComparableUlongInfo = new TypeInfo ("IComparable<ulong>", "ICOMPARABLE_ULONG", "OBJECT", true, true, false, true));

    // Primitive types, each wired up to the interfaces it implements.
    all.Children.Add (t = new TypeInfo ("bool", "BOOL", "OBJECT", true, true, true, false));
    t.Interfaces.Add (IComparableInfo); t.Interfaces.Add (IComparableBoolInfo); t.Interfaces.Add (IConvertibleInfo); t.Interfaces.Add (IEquatableBoolInfo);
    all.Children.Add (t = new TypeInfo ("float", "FLOAT", "OBJECT", true, true, true, false));
    t.Interfaces.Add (IComparableInfo); t.Interfaces.Add (IComparableFloatInfo); t.Interfaces.Add (IConvertibleInfo); t.Interfaces.Add (IEquatableFloatInfo); t.Interfaces.Add (IFormattableInfo);
    all.Children.Add (t = new TypeInfo ("double", "DOUBLE", "OBJECT", true, true, true, false));
    t.Interfaces.Add (IComparableInfo); t.Interfaces.Add (IComparableDoubleInfo); t.Interfaces.Add (IConvertibleInfo); t.Interfaces.Add (IEquatableDoubleInfo); t.Interfaces.Add (IFormattableInfo);
    all.Children.Add (t = new TypeInfo ("guint64", "UINT64", "OBJECT", true, true, true, false));
    t.Interfaces.Add (IComparableInfo); t.Interfaces.Add (IComparableUlongInfo); t.Interfaces.Add (IConvertibleInfo); t.Interfaces.Add (IEquatableUlongInfo); t.Interfaces.Add (IFormattableInfo);
    all.Children.Add (t = new TypeInfo ("gint64", "INT64", "OBJECT", true, true, true, false));
    t.Interfaces.Add (IComparableInfo); t.Interfaces.Add (IComparableLongInfo); t.Interfaces.Add (IConvertibleInfo); t.Interfaces.Add (IEquatableLongInfo); t.Interfaces.Add (IFormattableInfo);
    all.Children.Add (t = new TypeInfo ("guint32", "UINT32", "OBJECT", true, true, true, false));
    t.Interfaces.Add (IComparableInfo); t.Interfaces.Add (IComparableUintInfo); t.Interfaces.Add (IConvertibleInfo); t.Interfaces.Add (IEquatableUintInfo); t.Interfaces.Add (IFormattableInfo);
    all.Children.Add (t = new TypeInfo ("gint32", "INT32", "OBJECT", true, true, true, false));
    t.Interfaces.Add (IComparableInfo); t.Interfaces.Add (IComparableIntInfo); t.Interfaces.Add (IConvertibleInfo); t.Interfaces.Add (IEquatableIntInfo); t.Interfaces.Add (IFormattableInfo);
    all.Children.Add (t = new TypeInfo ("char*", "STRING", "OBJECT", true, true, true, false));
    t.Interfaces.Add (IComparableInfo); t.Interfaces.Add (IComparableStringInfo); t.Interfaces.Add (IConvertibleInfo); t.Interfaces.Add (IEquatableStringInfo); t.Interfaces.Add (IFormattableInfo);
    all.Children.Add (t = new TypeInfo ("TimeSpan", "TIMESPAN", "OBJECT", true, true, true, false));
    t.Interfaces.Add (IComparableInfo); t.Interfaces.Add (IComparableTimeSpanInfo); t.Interfaces.Add (IEquatableTimeSpanInfo);
    all.Children.Add (t = new TypeInfo ("char", "CHAR", "OBJECT", true, true, true, false));
    t.Interfaces.Add (IComparableInfo); t.Interfaces.Add (IComparableCharInfo); t.Interfaces.Add (IConvertibleInfo); t.Interfaces.Add (IEquatableCharInfo);
    all.Children.Add (new TypeInfo ("NPObj", "NPOBJ", "OBJECT", true, true, true, false));
    all.Children.Add (new TypeInfo ("System.Windows.Markup.XmlLanguage", "XMLLANGUAGE", "OBJECT", true, true));

    // Set IncludeInKinds for all types which inherit from EventObject
    foreach (MemberInfo member in all.Children.Values) {
        TypeInfo type = member as TypeInfo;
        if (type == null)
            continue;
        if (type.Name == "EventObject")
            type.Annotations ["IncludeInKinds"] = null;

        // Walk the base-type chain looking for EventObject.
        TypeReference bR = type.Base;
        MemberInfo m;
        TypeInfo b;
        while (bR != null) {
            if (bR.Value == "EventObject") {
                member.Annotations ["IncludeInKinds"] = null;
            }
            if (!all.Children.TryGetValue (bR.Value, out m))
                break;
            b = m as TypeInfo;
            if (b != null)
                bR = b.Base;
            else
                bR = null;
        }
    }

    return all;
}
/// <summary>
/// Creates a <see cref="Field"/> from a serialized value.
/// </summary>
/// <param name="tokenizer">The tokenizer.</param>
/// <remarks>
/// Grammar: an optional 'd' (delta/previous) or 'p' (prior) prefix, then either a
/// plain decimal number (a constant <see cref="FieldType.Value"/> field) or "0x"
/// followed by an optional size marker letter and a hexadecimal address.
/// Fix: the original source had the return statement's semicolon misplaced
/// outside the if block ("} } ;"), which is not valid C#; it now terminates the
/// return statement. The two hex-digit switches were also replaced with
/// character-range checks, preserving behavior.
/// </remarks>
internal static Field Deserialize(Tokenizer tokenizer)
{
    var fieldType = FieldType.MemoryAddress;
    if (tokenizer.NextChar == 'd')
    {
        fieldType = FieldType.PreviousValue;
        tokenizer.Advance();
    }
    else if (tokenizer.NextChar == 'p')
    {
        fieldType = FieldType.PriorValue;
        tokenizer.Advance();
    }

    // No "0x" prefix means a raw decimal constant.
    if (!tokenizer.Match("0x"))
        return new Field { Type = FieldType.Value, Value = ReadNumber(tokenizer) };

    // Optional size marker. A hex digit here means the size was omitted
    // (defaults to Word) and the digit belongs to the address, so it is not
    // consumed; a space also means Word but is consumed.
    var size = FieldSize.None;
    switch (tokenizer.NextChar)
    {
        case 'M': size = FieldSize.Bit0; tokenizer.Advance(); break;
        case 'N': size = FieldSize.Bit1; tokenizer.Advance(); break;
        case 'O': size = FieldSize.Bit2; tokenizer.Advance(); break;
        case 'P': size = FieldSize.Bit3; tokenizer.Advance(); break;
        case 'Q': size = FieldSize.Bit4; tokenizer.Advance(); break;
        case 'R': size = FieldSize.Bit5; tokenizer.Advance(); break;
        case 'S': size = FieldSize.Bit6; tokenizer.Advance(); break;
        case 'T': size = FieldSize.Bit7; tokenizer.Advance(); break;
        case 'L': size = FieldSize.LowNibble; tokenizer.Advance(); break;
        case 'U': size = FieldSize.HighNibble; tokenizer.Advance(); break;
        case 'H': size = FieldSize.Byte; tokenizer.Advance(); break;
        case 'X': size = FieldSize.DWord; tokenizer.Advance(); break;
        case ' ': size = FieldSize.Word; tokenizer.Advance(); break;
        default:
        {
            var next = tokenizer.NextChar;
            if ((next >= '0' && next <= '9') || (next >= 'a' && next <= 'f') || (next >= 'A' && next <= 'F'))
                size = FieldSize.Word;
            break;
        }
    }

    // Accumulate the hex address, stopping at the first non-hex character.
    uint address = 0;
    do
    {
        uint charValue;
        var c = tokenizer.NextChar;
        if (c >= '0' && c <= '9')
            charValue = (uint)(c - '0');
        else if (c >= 'a' && c <= 'f')
            charValue = (uint)(c - 'a' + 10);
        else if (c >= 'A' && c <= 'F')
            charValue = (uint)(c - 'A' + 10);
        else
            break;

        tokenizer.Advance();
        address <<= 4;
        address += charValue;
    } while (true);

    return new Field { Size = size, Type = fieldType, Value = address };
}
/// <summary>
/// A representative expression is split into the expected sequence of name,
/// operator, number and string tokens, and the tokenizer stops cleanly (and
/// stays stopped) at the end of input.
/// </summary>
public void CanTokenize()
{
    const string text = "\tf(123*-foo, 'it''s'+\"\\ntime\")\n";
    var sut = new Tokenizer(text);

    // f
    Assert.True(sut.Advance());
    Assert.True(sut.IsName("f"));
    Assert.Equal("f", sut.CurrentValue);

    // (
    Assert.True(sut.Advance());
    Assert.True(sut.IsOperator("("));

    // 123
    Assert.True(sut.Advance());
    Assert.True(sut.IsNumber());
    Assert.Equal(123.0, sut.CurrentValue);

    // *
    Assert.True(sut.Advance());
    Assert.True(sut.IsOperator("*"));
    Assert.Equal("*", sut.CurrentValue);

    // - (matched via the multi-candidate overload)
    Assert.True(sut.Advance());
    Assert.True(sut.IsOperator("+", "-"));

    // foo
    Assert.True(sut.Advance());
    Assert.True(sut.IsName("foo"));

    // ,
    Assert.True(sut.Advance());
    Assert.True(sut.IsOperator(","));

    // 'it''s' — doubled quote collapses to a single apostrophe
    Assert.True(sut.Advance());
    Assert.True(sut.IsString());
    Assert.Equal("it's", sut.CurrentValue);

    // +
    Assert.True(sut.Advance());
    Assert.True(sut.IsOperator("+"));

    // "\ntime" — escape sequence is decoded
    Assert.True(sut.Advance());
    Assert.True(sut.IsString());
    Assert.Equal("\ntime", sut.CurrentValue);

    // )
    Assert.True(sut.Advance());
    Assert.True(sut.IsOperator(")"));

    // End of input.
    Assert.False(sut.Advance());
    Assert.True(sut.IsEnd);
    Assert.Equal(text.Length, sut.Index);

    // Idempotence at end of input:
    Assert.False(sut.Advance());
    Assert.True(sut.IsEnd);
}
/// <summary>
/// Parses one colon-delimited serialized achievement record and appends the
/// built achievement to <c>_achievements</c>.
/// </summary>
/// <param name="tokenizer">The tokenizer positioned at the start of the record.</param>
private void ReadAchievement(Tokenizer tokenizer)
{
    var achievement = new AchievementBuilder();

    // id
    var part = tokenizer.ReadTo(':');
    int num;
    if (Int32.TryParse(part.ToString(), out num))
        achievement.Id = num;
    tokenizer.Advance();

    // requirements - may be quoted
    if (tokenizer.NextChar == '"')
    {
        var requirements = tokenizer.ReadQuotedString();
        achievement.ParseRequirements(Tokenizer.CreateTokenizer(requirements));
    }
    else
    {
        achievement.ParseRequirements(tokenizer);
    }
    tokenizer.Advance();

    // title - may be quoted
    if (tokenizer.NextChar == '"')
        achievement.Title = tokenizer.ReadQuotedString().ToString();
    else
        achievement.Title = tokenizer.ReadTo(':').ToString();
    tokenizer.Advance();

    // description - may be quoted
    if (tokenizer.NextChar == '"')
        achievement.Description = tokenizer.ReadQuotedString().ToString();
    else
        achievement.Description = tokenizer.ReadTo(':').ToString();
    tokenizer.Advance();

    tokenizer.ReadTo(':'); // deprecated
    tokenizer.Advance();
    tokenizer.ReadTo(':'); // deprecated
    tokenizer.Advance();
    tokenizer.ReadTo(':'); // deprecated
    tokenizer.Advance();
    tokenizer.ReadTo(':'); // author
    tokenizer.Advance();

    // points
    part = tokenizer.ReadTo(':');
    if (Int32.TryParse(part.ToString(), out num))
        achievement.Points = num;
    tokenizer.Advance();

    var published = tokenizer.ReadTo(':'); // created timestamp
    tokenizer.Advance();
    var updated = tokenizer.ReadTo(':'); // updated timestamp
    tokenizer.Advance();
    tokenizer.ReadTo(':'); // upvotes
    tokenizer.Advance();
    tokenizer.ReadTo(':'); // downvotes
    tokenizer.Advance();

    // badge name - may be quoted
    if (tokenizer.NextChar == '"')
        achievement.BadgeName = tokenizer.ReadQuotedString().ToString();
    else
        achievement.BadgeName = tokenizer.ReadTo(':').ToString();

    // FIX: String.Remove returns a new string (strings are immutable); the
    // original discarded the result, so the "_lock" suffix was never stripped.
    if (achievement.BadgeName.EndsWith("_lock"))
        achievement.BadgeName = achievement.BadgeName.Remove(achievement.BadgeName.Length - 5);

    var builtAchievement = achievement.ToAchievement();

    // Timestamps are seconds since the Unix epoch; "0" means not set.
    if (published != "0" && Int32.TryParse(published.ToString(), out num))
        builtAchievement.Published = _unixEpoch.AddSeconds(num);
    if (updated != "0" && Int32.TryParse(updated.ToString(), out num))
        builtAchievement.LastModified = _unixEpoch.AddSeconds(num);

    _achievements.Add(builtAchievement);
}
private void ParseArray(string fieldName, Tokenizer tokenizer) { if (tokenizer.NextChar != '[') { throw new InvalidOperationException("Array should start with an opening bracket, found " + tokenizer.NextChar); } tokenizer.Advance(); tokenizer.SkipWhitespace(); var items = new List <JsonObject>(); var strings = new List <string>(); var ints = new List <int>(); var dbls = new List <double>(); while (tokenizer.NextChar != ']') { if (tokenizer.NextChar == '{') { var itemName = String.Format("{0}[{1}]", fieldName, items.Count); var item = new JsonObject(); item.ParseObject(itemName, tokenizer); items.Add(item); } else if (tokenizer.NextChar == '"') { var str = tokenizer.ReadQuotedString(); strings.Add(str.ToString()); } else if (Char.IsDigit(tokenizer.NextChar)) { var value = tokenizer.ReadNumber(); if (value.Contains('.')) { var dVal = Double.Parse(value.ToString()); dbls.Add(dVal); } else { var iVal = Int32.Parse(value.ToString()); ints.Add(iVal); } } else { throw new NotSupportedException(fieldName + " array element starting with " + tokenizer.NextChar); } tokenizer.SkipWhitespace(); if (tokenizer.NextChar == ',') { tokenizer.Advance(); tokenizer.SkipWhitespace(); } } tokenizer.Advance(); if (strings.Count > 0) { AddField(fieldName, JsonFieldType.StringArray, strings.ToArray()); } else if (ints.Count > 0) { AddField(fieldName, JsonFieldType.IntegerArray, ints.ToArray()); } else if (dbls.Count > 0) { AddField(fieldName, JsonFieldType.DoubleArray, dbls.ToArray()); } else { AddField(fieldName, JsonFieldType.ObjectArray, items.ToArray()); } }
/// <summary>
/// Parses a JSON object body (starting at '{') and records each field on this
/// instance via AddField, recursing for nested objects and arrays.
/// </summary>
/// <param name="parentObjectName">Name of the object being parsed; used only in error messages.</param>
/// <param name="tokenizer">Tokenizer positioned at the opening brace.</param>
private void ParseObject(string parentObjectName, Tokenizer tokenizer)
{
    if (tokenizer.NextChar != '{')
    {
        throw new InvalidOperationException("Object should start with an opening brace, found " + tokenizer.NextChar);
    }
    tokenizer.Advance();
    tokenizer.SkipWhitespace();
    while (tokenizer.NextChar != '}')
    {
        // every field starts with a quoted name followed by ':'
        if (tokenizer.NextChar != '"')
        {
            if (tokenizer.NextChar == 0)
            {
                throw new InvalidOperationException("End of stream encountered processing fields for " + parentObjectName + " object");
            }
            throw new InvalidOperationException("Field name should be in quotes, found " + tokenizer.NextChar);
        }
        var fieldName = tokenizer.ReadQuotedString();
        tokenizer.SkipWhitespace();
        if (tokenizer.NextChar != ':')
        {
            throw new InvalidOperationException("Expecting colon following field name: " + fieldName);
        }
        tokenizer.Advance();
        tokenizer.SkipWhitespace();
        Token value;
        // dispatch on the first character of the value
        switch (tokenizer.NextChar)
        {
            case '{':
                // nested object - parse recursively into a child JsonObject
                var nestedObject = new JsonObject();
                nestedObject.ParseObject(fieldName.ToString(), tokenizer);
                AddField(fieldName.ToString(), nestedObject);
                break;
            case '"':
                value = tokenizer.ReadQuotedString();
                // Heuristic date/time detection by length and separator positions.
                // Each branch only commits (break) when the numeric parts parse;
                // otherwise control falls through to the plain-string AddField below.
                if (value.Length == 11 && value[10] == 'Z' && value[4] == '-' && value[7] == '-')
                {
                    // "yyyy-MM-ddZ" -> Date field
                    int year, month, day;
                    if (Int32.TryParse(value.Substring(0, 4), out year) &&
                        Int32.TryParse(value.Substring(5, 2), out month) &&
                        Int32.TryParse(value.Substring(8, 2), out day))
                    {
                        AddField(fieldName.ToString(), JsonFieldType.Date, value.ToString());
                        break;
                    }
                }
                else if (value.Length == 17 && value[16] == 'Z' && value[4] == '-' && value[7] == '-' && value[10] == 'T' && value[13] == ':')
                {
                    // "yyyy-MM-ddTHH:mmZ" -> DateTime field, normalized to include seconds/milliseconds
                    int year, month, day, hour, minute;
                    if (Int32.TryParse(value.Substring(0, 4), out year) &&
                        Int32.TryParse(value.Substring(5, 2), out month) &&
                        Int32.TryParse(value.Substring(8, 2), out day) &&
                        Int32.TryParse(value.Substring(11, 2), out hour) &&
                        Int32.TryParse(value.Substring(14, 2), out minute))
                    {
                        AddField(fieldName.ToString(), JsonFieldType.DateTime, value.Substring(0, 16) + ":00.000Z");
                        break;
                    }
                }
                else if (value.Length == 24 && value[23] == 'Z' && value[4] == '-' && value[7] == '-' && value[10] == 'T' && value[13] == ':' && value[16] == ':' && value[19] == '.')
                {
                    // "yyyy-MM-ddTHH:mm:ss.fffZ" -> DateTime field, stored verbatim
                    int year, month, day, hour, minute, second, millisecond;
                    if (Int32.TryParse(value.Substring(0, 4), out year) &&
                        Int32.TryParse(value.Substring(5, 2), out month) &&
                        Int32.TryParse(value.Substring(8, 2), out day) &&
                        Int32.TryParse(value.Substring(11, 2), out hour) &&
                        Int32.TryParse(value.Substring(14, 2), out minute) &&
                        Int32.TryParse(value.Substring(17, 2), out second) &&
                        Int32.TryParse(value.Substring(20, 3), out millisecond))
                    {
                        AddField(fieldName.ToString(), JsonFieldType.DateTime, value.ToString());
                        break;
                    }
                }
                // not a recognized date/time - store as a plain string
                AddField(fieldName.ToString(), value.ToString());
                break;
            case '[':
                ParseArray(fieldName.ToString(), tokenizer);
                break;
            case '0': case '1': case '2': case '3': case '4':
            case '5': case '6': case '7': case '8': case '9':
                // numeric value - a decimal point distinguishes doubles from integers
                value = tokenizer.ReadNumber();
                if (value.Contains('.'))
                {
                    var dVal = Double.Parse(value.ToString());
                    AddField(fieldName.ToString(), dVal);
                }
                else
                {
                    var iVal = Int32.Parse(value.ToString());
                    AddField(fieldName.ToString(), iVal);
                }
                break;
            default:
                // bare identifier - only the JSON literals true/false/null are accepted
                value = tokenizer.ReadIdentifier();
                if (value.CompareTo("true", StringComparison.InvariantCultureIgnoreCase) == 0)
                {
                    AddField(fieldName.ToString(), true);
                }
                else if (value.CompareTo("false", StringComparison.InvariantCultureIgnoreCase) == 0)
                {
                    AddField(fieldName.ToString(), false);
                }
                else if (value.CompareTo("null", StringComparison.InvariantCultureIgnoreCase) == 0)
                {
                    AddField(fieldName.ToString(), JsonFieldType.Null, null);
                }
                else
                {
                    throw new NotSupportedException("Unsupported raw field value: " + value);
                }
                break;
        }
        tokenizer.SkipWhitespace();
        // optional comma between fields (a trailing comma before '}' is tolerated)
        if (tokenizer.NextChar == ',')
        {
            tokenizer.Advance();
            tokenizer.SkipWhitespace();
        }
    }
    tokenizer.Advance();
}
/// <summary> /// Advances over the next token. /// </summary> public XmlTokenType Advance() { // tag if (_tokenizer.NextChar == '<') { _tokenizer.StartToken(); _tokenizer.Advance(); bool isClosing = (_tokenizer.NextChar == '/'); while (_tokenizer.NextChar != '>' && _tokenizer.NextChar != '\0') { if (_tokenizer.NextChar == '"') { _tokenizer.Advance(); while (_tokenizer.NextChar != '"' && _tokenizer.NextChar != '\0') { if (_tokenizer.NextChar == '\\') { _tokenizer.Advance(); } _tokenizer.Advance(); } } _tokenizer.Advance(); } _tokenizer.Advance(); NextToken = _tokenizer.EndToken(); NextTokenType = isClosing ? XmlTokenType.CloseTag : XmlTokenType.OpenTag; return(NextTokenType); } // end of input if (_tokenizer.NextChar == '\0') { if (NextTokenType != XmlTokenType.None) { NextToken = new Token(); NextTokenType = XmlTokenType.None; } return(XmlTokenType.None); } // content _tokenizer.StartToken(); _tokenizer.SkipWhitespace(); if (_tokenizer.NextChar == '<') { // whitespace-only content. ignore _tokenizer.EndToken(); return(Advance()); } do { _tokenizer.Advance(); } while (_tokenizer.NextChar != '<' && _tokenizer.NextChar != '\0'); NextToken = _tokenizer.EndToken(); NextTokenType = XmlTokenType.Content; return(XmlTokenType.Content); }
public void Convert(string input, InlineCollection inlinesToPopulate) { _formatStack.Push(inlinesToPopulate); _isNewLine = true; _tokenizer = Tokenizer.CreateTokenizer(input); while (_tokenizer.NextChar != '\0') { switch (_tokenizer.NextChar) { case '\'': if (_tokenizer.Match("'''")) { FlushInline(); ToggleState(ref _isBold, () => new Bold()); continue; } if (_tokenizer.Match("''")) { FlushInline(); ToggleState(ref _isItalic, () => new Italic()); continue; } break; case '=': if (HandleHeader()) { continue; } break; case ':': if (_isNewLine && HandleIndent()) { continue; } break; case '[': if (!_isLink && _tokenizer.Match("[[")) { _isRedirectedLink = false; FlushInline(); ToggleState(ref _isLink, () => new Hyperlink { Command = _hyperlinkCommand }); continue; } break; case ']': if (_isLink && _tokenizer.Match("]]")) { var parameter = _buffer.ToString(); _buffer.Length = 0; if (!_isRedirectedLink) { FlushInline(); } ToggleState(ref _isLink, null); var hyperlink = _formatStack.Peek().Last() as Hyperlink; if (hyperlink != null) { hyperlink.CommandParameter = parameter; } continue; } break; case '|': if (_isLink) { _isRedirectedLink = true; _tokenizer.Advance(); FlushInline(); continue; } break; case '{': if (HandleColoredText()) { continue; } break; case '\r': _tokenizer.Advance(); FlushInline(); continue; case '\n': _tokenizer.Advance(); FlushInline(); _formatStack.Peek().Add(new LineBreak()); _isNewLine = true; continue; } _isNewLine = false; _buffer.Append(_tokenizer.NextChar); _tokenizer.Advance(); } FlushInline(); }
/// <summary>
/// Creates a <see cref="Field"/> from a serialized value.
/// </summary>
/// <param name="tokenizer">The tokenizer positioned at the start of the serialized field.</param>
internal static Field Deserialize(Tokenizer tokenizer)
{
    // Optional prefix selects the field type, or introduces an explicitly typed constant
    // that is returned immediately.
    var fieldType = FieldType.MemoryAddress;
    switch (tokenizer.NextChar)
    {
        case 'd':
            fieldType = FieldType.PreviousValue;
            tokenizer.Advance();
            break;

        case 'p':
            fieldType = FieldType.PriorValue;
            tokenizer.Advance();
            break;

        case 'b':
            fieldType = FieldType.BinaryCodedDecimal;
            tokenizer.Advance();
            break;

        case 'h': // explicit hex value
            tokenizer.Advance();
            return new Field { Type = FieldType.Value, Value = ReadHexNumber(tokenizer) };

        case 'v': // explicit decimal value
            tokenizer.Advance();
            if (tokenizer.NextChar == '-')
                goto case '-';
            return new Field { Type = FieldType.Value, Value = ReadNumber(tokenizer) };

        case '-': // explicit negative decimal value
            tokenizer.Advance();
            return new Field { Type = FieldType.Value, Value = (uint)(-(int)ReadNumber(tokenizer)) };
    }

    // 'f' introduces the floating-point family: fF/fM are float reads at an address,
    // anything else after 'f' is a float constant.
    if (tokenizer.NextChar == 'f')
    {
        tokenizer.Advance();
        switch (tokenizer.NextChar)
        {
            case 'F':
                tokenizer.Advance();
                return new Field { Size = FieldSize.Float, Type = fieldType, Value = ReadHexNumber(tokenizer) };

            case 'M':
                tokenizer.Advance();
                return new Field { Size = FieldSize.MBF32, Type = fieldType, Value = ReadHexNumber(tokenizer) };

            default:
                return new Field { Type = FieldType.Float, Float = ReadFloat(tokenizer) };
        }
    }

    // Without a "0x" prefix the remainder is a raw numeric constant, not an address.
    // FIX: the semicolon was misplaced outside the closing brace ("} ;"), leaving the
    // return statement unterminated plus a stray empty statement; it belongs here.
    if (!tokenizer.Match("0x"))
        return new Field { Type = FieldType.Value, Value = ReadNumber(tokenizer) };

    // Optional single-character size marker (case-insensitive). A leading hex digit
    // means no marker was provided (default 16-bit word) and the digit is left in
    // place for the address parser.
    FieldSize size = FieldSize.None;
    switch (tokenizer.NextChar)
    {
        case 'm': case 'M': size = FieldSize.Bit0; tokenizer.Advance(); break;
        case 'n': case 'N': size = FieldSize.Bit1; tokenizer.Advance(); break;
        case 'o': case 'O': size = FieldSize.Bit2; tokenizer.Advance(); break;
        case 'p': case 'P': size = FieldSize.Bit3; tokenizer.Advance(); break;
        case 'q': case 'Q': size = FieldSize.Bit4; tokenizer.Advance(); break;
        case 'r': case 'R': size = FieldSize.Bit5; tokenizer.Advance(); break;
        case 's': case 'S': size = FieldSize.Bit6; tokenizer.Advance(); break;
        case 't': case 'T': size = FieldSize.Bit7; tokenizer.Advance(); break;
        case 'l': case 'L': size = FieldSize.LowNibble; tokenizer.Advance(); break;
        case 'u': case 'U': size = FieldSize.HighNibble; tokenizer.Advance(); break;
        case 'h': case 'H': size = FieldSize.Byte; tokenizer.Advance(); break;
        case 'w': case 'W': size = FieldSize.TByte; tokenizer.Advance(); break;
        case 'x': case 'X': size = FieldSize.DWord; tokenizer.Advance(); break;
        case 'k': case 'K': size = FieldSize.BitCount; tokenizer.Advance(); break;
        case 'i': case 'I': size = FieldSize.BigEndianWord; tokenizer.Advance(); break;
        case 'j': case 'J': size = FieldSize.BigEndianTByte; tokenizer.Advance(); break;
        case 'g': case 'G': size = FieldSize.BigEndianDWord; tokenizer.Advance(); break;

        case '0': case '1': case '2': case '3': case '4':
        case '5': case '6': case '7': case '8': case '9':
        case 'A': case 'a': case 'B': case 'b': case 'C': case 'c':
        case 'D': case 'd': case 'E': case 'e': case 'F': case 'f':
            // hex digit - it belongs to the address, not the size marker
            size = FieldSize.Word;
            break;

        case ' ':
            size = FieldSize.Word;
            tokenizer.Advance();
            break;
    }

    return new Field { Size = size, Type = fieldType, Value = ReadHexNumber(tokenizer) };
}