// Example #1
        /// <summary>
        /// Parses a PDF dictionary token of the form &lt;&lt; /Key value ... &gt;&gt; from the
        /// tokeniser's current position, building a key/value map of the entries.
        /// </summary>
        /// <param name="tokeniser">Source of bytes/tokens; its read position is advanced past the dictionary.</param>
        /// <param name="objectId">Optional id of the enclosing indirect object; forwarded to the base token.</param>
        /// <exception cref="Exception">
        /// Thrown (via <c>tokeniser.Exception</c>) when the leading '&lt;&lt;' is missing or a key
        /// does not start with '/'.
        /// </exception>
        public DictionaryToken(Tokeniser tokeniser, ObjectId?objectId) : base(tokeniser, objectId)
        {
            // Expected input shape:
            // <<
            //   /Name1 123
            //   /Name2 [(string) (array) 123]
            //   /Name3 <</subDictionaryName1 123 /subDictionaryName2 true>>
            //   /Name4 (another string)
            //   /Name5 <112233EE>
            // >>
            this.tokeniser = tokeniser;
            var b = tokeniser.SkipWhiteSpace();

            // A dictionary must open with '<<'. Note: if the first byte already differs,
            // GetNextByte() is short-circuited, so the LookaheadByte() in the message shows
            // the byte AFTER the offending one — acceptable for a diagnostic.
            if (b != '<' || tokeniser.GetNextByte() != '<')
            {
                // Fixed: missing space after "was" made the message read "...but was'X...".
                throw tokeniser.Exception($"illegal dictionary format, leading characters '<<' expected, but was '{(char)b}{(char)tokeniser.LookaheadByte()}'.");
            }

            // Parse /Key value pairs until the closing '>>' is reached.
            tokens = new Dictionary<string, Token>();
            tokeniser.GetNextByte();
            b = tokeniser.SkipWhiteSpace();
            // NOTE(review): this exits when b=='>' OR the lookahead is '>'; presumably both
            // halves of '>>' are guaranteed adjacent by the format — confirm against Tokeniser.
            while (b != '>' && tokeniser.LookaheadByte() != '>')
            {
                if (b != '/')
                {
                    throw tokeniser.Exception($"Invalid dictionary format, '/' expected as leading character for dictionary key name, but was {(char)b}.");
                }
                var key   = new NameToken(tokeniser, null);
                var value = tokeniser.GetNextToken();
                // Capture the /Type entry's name so callers can identify the dictionary kind.
                if (key.Value == "Type" && value is NameToken typeNameToken)
                {
                    Type = typeNameToken.Value;
                }
                // Duplicate keys are collected into an ArrayToken rather than overwriting
                // or throwing (lenient handling of malformed PDFs).
                if (tokens.TryGetValue(key.Value, out var existingToken))
                {
                    if (existingToken is ArrayToken existingArrayToken)
                    {
                        existingArrayToken.Add(value);
                    }
                    else
                    {
                        // First duplicate: promote the existing single value to an array
                        // seeded with it, then append the new value.
                        tokens[key.Value] = new ArrayToken(tokeniser, existingToken)
                        {
                            value
                        };
                    }
                }
                else
                {
                    tokens.Add(key.Value, value);
                }
                b = tokeniser.SkipWhiteSpace();
            }
            // Consume the closing '>>'.
            tokeniser.GetNextByte();
            tokeniser.GetNextByte();

            // If a stream follows this dictionary, record where it starts and its length.
            StreamStartIndex = tokeniser.GetStreamStartIndex(this, out var length);
            Length           = length;
            keys             = tokens.Keys.ToArray();
        }